[ 576.511387] env[61215]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61215) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 576.511749] env[61215]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61215) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 576.511836] env[61215]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61215) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 576.512114] env[61215]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 576.599865] env[61215]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61215) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 576.611010] env[61215]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=61215) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 576.744632] env[61215]: INFO nova.virt.driver [None req-695235aa-c5a4-40a4-bb68-b1f22d029dfc None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 576.820761] env[61215]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 576.820949] env[61215]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 576.821078] env[61215]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61215) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 580.004359] env[61215]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-6ac6e05d-11ea-416d-8f8d-1d8f9145d5e6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 580.020350] env[61215]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61215) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 580.020554] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-e92ad0bd-bb11-4869-91b8-8f31afb6d019 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 580.047252] env[61215]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 1f762.
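The Acquiring / acquired / "released" triplet above is emitted by oslo.concurrency's lock helper wrapped around the session setup. A minimal sketch of the pattern that produces those DEBUG lines, assuming only the public lockutils API (the function body is a placeholder, not the actual oslo.vmware code):

    from oslo_concurrency import lockutils

    # Entering and leaving the decorated function logs the
    # "Acquiring" / "acquired" / "released" lines seen above; the lock
    # name matches the one in the log.
    @lockutils.synchronized('oslo_vmware_api_lock')
    def _create_session():
        pass  # placeholder: log in to vCenter and cache the session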
[ 580.047444] env[61215]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.226s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 580.047922] env[61215]: INFO nova.virt.vmwareapi.driver [None req-695235aa-c5a4-40a4-bb68-b1f22d029dfc None None] VMware vCenter version: 7.0.3
[ 580.051386] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0035310-1daf-4a5c-a348-313aeb384294 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 580.072648] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebca55a3-e4ca-4775-b817-83faae911304 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 580.078468] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e40980a-b6ab-487b-b387-e7169ea651b8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 580.085030] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64c05d4-861a-4133-a0d0-d4a3d182166d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 580.097945] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591283b7-387e-47f3-bbe6-ca139a331097 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 580.104329] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237d817a-ca0d-4f32-8c5b-58959c7862ff {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 580.134945] env[61215]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-23d12741-c67a-4538-b923-f78d61143c1e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 580.140526] env[61215]: DEBUG nova.virt.vmwareapi.driver [None req-695235aa-c5a4-40a4-bb68-b1f22d029dfc None None] Extension org.openstack.compute already exists. {{(pid=61215) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 580.143217] env[61215]: INFO nova.compute.provider_config [None req-695235aa-c5a4-40a4-bb68-b1f22d029dfc None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
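The driver name logged earlier comes from nova.conf's compute_driver option; nova qualifies the short name under its nova.virt package and imports the class. A simplified sketch of that resolution, assuming oslo.utils' importutils (the prefixing shown is an illustration, not nova's exact loader code):

    from oslo_utils import importutils

    compute_driver = 'vmwareapi.VMwareVCDriver'  # value from the log above
    # Illustrative step: qualify the short driver name with the nova.virt
    # package, then import the driver class for instantiation.
    driver_cls = importutils.import_class('nova.virt.' + compute_driver)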
[ 580.163452] env[61215]: DEBUG nova.context [None req-695235aa-c5a4-40a4-bb68-b1f22d029dfc None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),3d5db0af-1c33-47e0-91d2-2cbaed80461c(cell1) {{(pid=61215) load_cells /opt/stack/nova/nova/context.py:464}}
[ 580.165390] env[61215]: DEBUG oslo_concurrency.lockutils [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 580.165614] env[61215]: DEBUG oslo_concurrency.lockutils [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 580.166333] env[61215]: DEBUG oslo_concurrency.lockutils [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 580.166757] env[61215]: DEBUG oslo_concurrency.lockutils [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Acquiring lock "3d5db0af-1c33-47e0-91d2-2cbaed80461c" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 580.166951] env[61215]: DEBUG oslo_concurrency.lockutils [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Lock "3d5db0af-1c33-47e0-91d2-2cbaed80461c" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 580.167976] env[61215]: DEBUG oslo_concurrency.lockutils [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Lock "3d5db0af-1c33-47e0-91d2-2cbaed80461c" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 580.187997] env[61215]: INFO dbcounter [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Registered counter for database nova_cell0
[ 580.196342] env[61215]: INFO dbcounter [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Registered counter for database nova_cell1
[ 580.199369] env[61215]: DEBUG oslo_db.sqlalchemy.engines [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61215) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 580.199775] env[61215]: DEBUG oslo_db.sqlalchemy.engines [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61215) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 580.204204] env[61215]: DEBUG dbcounter [-] [61215] Writer thread running {{(pid=61215) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 580.204951] env[61215]: DEBUG dbcounter [-] [61215] Writer thread running {{(pid=61215) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 580.207625] env[61215]: ERROR nova.db.main.api [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 580.207625] env[61215]:     result = function(*args, **kwargs)
[ 580.207625] env[61215]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 580.207625] env[61215]:     return func(*args, **kwargs)
[ 580.207625] env[61215]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 580.207625] env[61215]:     result = fn(*args, **kwargs)
[ 580.207625] env[61215]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 580.207625] env[61215]:     return f(*args, **kwargs)
[ 580.207625] env[61215]:   File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 580.207625] env[61215]:     return db.service_get_minimum_version(context, binaries)
[ 580.207625] env[61215]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 580.207625] env[61215]:     _check_db_access()
[ 580.207625] env[61215]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 580.207625] env[61215]:     stacktrace = ''.join(traceback.format_stack())
[ 580.207625] env[61215]:
[ 580.208434] env[61215]: ERROR nova.db.main.api [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 580.208434] env[61215]:     result = function(*args, **kwargs)
[ 580.208434] env[61215]:   File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 580.208434] env[61215]:     return func(*args, **kwargs)
[ 580.208434] env[61215]:   File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 580.208434] env[61215]:     result = fn(*args, **kwargs)
[ 580.208434] env[61215]:   File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 580.208434] env[61215]:     return f(*args, **kwargs)
[ 580.208434] env[61215]:   File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 580.208434] env[61215]:     return db.service_get_minimum_version(context, binaries)
[ 580.208434] env[61215]:   File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 580.208434] env[61215]:     _check_db_access()
[ 580.208434] env[61215]:   File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 580.208434] env[61215]:     stacktrace = ''.join(traceback.format_stack())
[ 580.208434] env[61215]:
[ 580.208985] env[61215]: WARNING nova.objects.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Failed to get minimum service version for cell 3d5db0af-1c33-47e0-91d2-2cbaed80461c
[ 580.208985] env[61215]: WARNING nova.objects.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
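The two ERROR stacks above are the compute service's database guard firing, not a crash: direct DB access is blocked in nova-compute, the guard logs the offending call stack, and the caller falls back, which is what produces the two "Failed to get minimum service version" warnings. A simplified sketch of that guard pattern (an illustration under those assumptions, not nova's exact code in nova/db/main/api.py):

    import traceback

    DISABLE_DB_ACCESS = True  # flipped when the process runs as nova-compute

    def _check_db_access():
        if DISABLE_DB_ACCESS:
            # Capture and log the caller's stack (the ERROR lines above),
            # then refuse the query so the caller can fall back.
            stacktrace = ''.join(traceback.format_stack())
            print('No DB access allowed in nova-compute: %s' % stacktrace)
            raise RuntimeError('No DB access allowed in nova-compute')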
[ 580.209398] env[61215]: DEBUG oslo_concurrency.lockutils [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Acquiring lock "singleton_lock" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 580.209566] env[61215]: DEBUG oslo_concurrency.lockutils [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Acquired lock "singleton_lock" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 580.209818] env[61215]: DEBUG oslo_concurrency.lockutils [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Releasing lock "singleton_lock" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 580.210148] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Full set of CONF: {{(pid=61215) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 580.210297] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ******************************************************************************** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 580.210429] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] Configuration options gathered from: {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 580.210567] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 580.210755] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 580.210888] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ================================================================================ {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 580.211115] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] allow_resize_to_same_host = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.211292] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] arq_binding_timeout = 300 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.211428] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] backdoor_port = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.211556] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] backdoor_socket = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.211723] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] block_device_allocate_retries = 60 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.211887] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] block_device_allocate_retries_interval = 3 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.212072] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cert = self.pem {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.212245] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.212415] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute_monitors = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.212583] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] config_dir = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.212754] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] config_drive_format = iso9660 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.212889] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.213066] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] config_source = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.213240] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] console_host = devstack {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.213407] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] control_exchange = nova {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.213566] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cpu_allocation_ratio = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.213728] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] daemon = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.213896] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] debug = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.214068] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] default_access_ip_network_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.214239] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] default_availability_zone = nova {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.214396] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] default_ephemeral_format = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.214556] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] default_green_pool_size = 1000 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.214792] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.214961] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] default_schedule_zone = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.215130] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] disk_allocation_ratio = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.215295] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] enable_new_services = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.215477] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] enabled_apis = ['osapi_compute'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.215640] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] enabled_ssl_apis = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.215806] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] flat_injected = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.215966] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] force_config_drive = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.216143] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] force_raw_images = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.216317] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] graceful_shutdown_timeout = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.216768] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] heal_instance_info_cache_interval = 60 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.216768] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] host = cpu-1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.216880] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.217029] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.217189] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.217407] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.217575] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] instance_build_timeout = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.217739] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] instance_delete_interval = 300 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.217927] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] instance_format = [instance: %(uuid)s] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.218123] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] instance_name_template = instance-%08x {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.218293] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] instance_usage_audit = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.218468] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] instance_usage_audit_period = month {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.218637] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.218804] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.219053] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] internal_service_availability_zone = internal {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.219234] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] key = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.219404] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] live_migration_retry_count = 30 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.219572] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] log_config_append = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.219745] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.219907] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] log_dir = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.220082] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] log_file = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.220218] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] log_options = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.220384] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] log_rotate_interval = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.220554] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] log_rotate_interval_type = days {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.220722] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] log_rotation_type = none {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.220854] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.220983] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.221177] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.221344] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.221475] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.221639] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] long_rpc_timeout = 1800 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.221802] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] max_concurrent_builds = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.221963] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] max_concurrent_live_migrations = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.222141] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] max_concurrent_snapshots = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.222302] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] max_local_block_devices = 3 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.222460] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] max_logfile_count = 30 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.222619] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] max_logfile_size_mb = 200 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.222780] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] maximum_instance_delete_attempts = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.222951] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] metadata_listen = 0.0.0.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.223134] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] metadata_listen_port = 8775 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.223307] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] metadata_workers = 2 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.223469] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] migrate_max_retries = -1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.223638] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] mkisofs_cmd = genisoimage {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.223846] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.223981] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] my_ip = 10.180.1.21 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.224161] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] network_allocate_retries = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.224340] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.224510] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.224675] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] osapi_compute_listen_port = 8774 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.224844] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] osapi_compute_unique_server_name_scope = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.225022] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] osapi_compute_workers = 2 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.225225] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] password_length = 12 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.225358] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] periodic_enable = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.225512] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] periodic_fuzzy_delay = 60 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.225683] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] pointer_model = usbtablet {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.225853] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] preallocate_images = none {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.226020] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] publish_errors = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.226157] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] pybasedir = /opt/stack/nova {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.226317] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ram_allocation_ratio = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.226480] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] rate_limit_burst = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.226649] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] rate_limit_except_level = CRITICAL {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.226810] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] rate_limit_interval = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.226971] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] reboot_timeout = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.227150] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] reclaim_instance_interval = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.227311] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] record = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.227482] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] reimage_timeout_per_gb = 60 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.227650] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] report_interval = 120 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.227813] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] rescue_timeout = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.228016] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] reserved_host_cpus = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.228200] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] reserved_host_disk_mb = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.228365] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] reserved_host_memory_mb = 512 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.228528] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] reserved_huge_pages = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.228689] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] resize_confirm_window = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.228856] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] resize_fs_using_block_device = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.229071] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] resume_guests_state_on_host_boot = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.229254] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.229424] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] rpc_response_timeout = 60 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.229587] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] run_external_periodic_tasks = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.229768] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] running_deleted_instance_action = reap {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.229938] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.230116] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] running_deleted_instance_timeout = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.230292] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] scheduler_instance_sync_interval = 120 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.230469] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] service_down_time = 720 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.230634] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] servicegroup_driver = db {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.230801] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] shelved_offload_time = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.230957] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] shelved_poll_interval = 3600 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.231140] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] shutdown_timeout = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.231307] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] source_is_ipv6 = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.231466] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ssl_only = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.231711] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.231879] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] sync_power_state_interval = 600 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.232059] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] sync_power_state_pool_size = 1000 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.232240] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] syslog_log_facility = LOG_USER {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.232426] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] tempdir = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.232606] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] timeout_nbd = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.232795] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] transport_url = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.232966] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] update_resources_interval = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.233147] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] use_cow_images = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.233313] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] use_eventlog = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.233474] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] use_journal = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.233636] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] use_json = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.233797] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] use_rootwrap_daemon = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.233958] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] use_stderr = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.234137] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] use_syslog = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.234299] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vcpu_pin_set = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.234469] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plugging_is_fatal = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.234638] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plugging_timeout = 300 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.234804] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] virt_mkfs = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.234968] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] volume_usage_poll_interval = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.235148] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] watch_log_file = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.235322] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] web = /usr/share/spice-html5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 580.235503] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_concurrency.disable_process_locking = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.235797] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.235978] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.236163] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.236337] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.236510] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.236677] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.236864] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.auth_strategy = keystone {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.237044] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.compute_link_prefix = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.237227] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.237400] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.dhcp_domain = novalocal {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.237571] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.enable_instance_password = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.237736] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.glance_link_prefix = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.237930] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.238127] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.238297] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.instance_list_per_project_cells = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.238462] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.list_records_by_skipping_down_cells = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.238631] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.local_metadata_per_cell = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.238806] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.max_limit = 1000 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.239028] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.metadata_cache_expiration = 15 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.239224] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.neutron_default_tenant_id = default {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.239398] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.use_neutron_default_nets = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.239569] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.239736] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.239907] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.240097] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.240275] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.vendordata_dynamic_targets = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.240445] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.vendordata_jsonfile_path = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.240628] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.240825] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.backend = dogpile.cache.memcached {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.240995] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.backend_argument = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.241187] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.config_prefix = cache.oslo {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.241362] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.dead_timeout = 60.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.241529] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.debug_cache_backend = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.241696] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.enable_retry_client = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.241862] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.enable_socket_keepalive = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.242046] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.enabled = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.242219] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.enforce_fips_mode = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.242387] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.expiration_time = 600 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.242553] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.hashclient_retry_attempts = 2 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.242722] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.242888] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.memcache_dead_retry = 300 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.243063] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.memcache_password = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.243235] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.243401] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.243566] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.memcache_pool_maxsize = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.243735] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.243901] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.memcache_sasl_enabled = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.244102] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.244276] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.244440] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.memcache_username = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.244606] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.proxies = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.244768] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.redis_password = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.244942] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.245134] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.245311] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.redis_server = localhost:6379 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.245480] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.redis_socket_timeout = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.245638] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.redis_username = None {{(pid=61215) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.245802] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.retry_attempts = 2 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.245968] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.retry_delay = 0.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.246152] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.socket_keepalive_count = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.246321] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.socket_keepalive_idle = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.246485] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.socket_keepalive_interval = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.246645] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.tls_allowed_ciphers = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.246805] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.tls_cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.246964] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.tls_certfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.247143] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.tls_enabled = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.247306] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cache.tls_keyfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.247479] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.auth_section = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.247656] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.auth_type = password {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.247820] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.248037] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.248216] env[61215]: DEBUG oslo_service.service 
[None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.certfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.248387] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.collect_timing = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.248554] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.cross_az_attach = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.248721] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.debug = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.248906] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.endpoint_template = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.249119] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.http_retries = 3 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.249297] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.insecure = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.249463] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.keyfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.249639] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.os_region_name = RegionOne {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.249807] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.split_loggers = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.249970] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cinder.timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.250163] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.250331] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute.cpu_dedicated_set = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.250495] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute.cpu_shared_set = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.250664] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute.image_type_exclude_list = [] {{(pid=61215) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.250832] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.251007] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.251184] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.251352] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.251534] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.251700] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute.resource_provider_association_refresh = 300 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.251866] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute.shutdown_retry_interval = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.252093] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.252297] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] conductor.workers = 2 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.252480] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] console.allowed_origins = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.252646] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] console.ssl_ciphers = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.252821] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] console.ssl_minimum_version = default {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.252994] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] consoleauth.enforce_session_timeout = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.253186] env[61215]: DEBUG oslo_service.service [None 
req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] consoleauth.token_ttl = 600 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.253359] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.253521] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.certfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.253690] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.collect_timing = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.253851] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.connect_retries = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.254015] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.connect_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.254184] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.endpoint_override = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.254352] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.insecure = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.254539] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.keyfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.254676] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.max_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.254836] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.min_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.254996] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.region_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.255173] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.retriable_status_codes = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.255334] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.service_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.255503] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.service_type = accelerator {{(pid=61215) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.255688] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.split_loggers = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.255825] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.status_code_retries = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.255984] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.status_code_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.256162] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.256347] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.256512] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] cyborg.version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.256695] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.backend = sqlalchemy {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.256867] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.connection = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.257049] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.connection_debug = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.257230] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.connection_parameters = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.257399] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.connection_recycle_time = 3600 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.257566] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.connection_trace = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.257731] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.db_inc_retry_interval = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.257913] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.db_max_retries = 20 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
[ 580.258107] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.db_max_retry_interval = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.258282] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.db_retry_interval = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.258450] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.max_overflow = 50 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.258616] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.max_pool_size = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.258786] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.max_retries = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.259019] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.259196] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.mysql_wsrep_sync_wait = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.259365] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.pool_timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.259533] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.retry_interval = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.259697] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.slave_connection = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.259861] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.sqlite_synchronous = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.260035] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] database.use_db_reconnect = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.260231] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.backend = sqlalchemy {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.260400] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.connection = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.260576] env[61215]: DEBUG oslo_service.service [None 
req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.connection_debug = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.260754] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.connection_parameters = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.260943] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.connection_recycle_time = 3600 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.261178] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.connection_trace = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.262403] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.db_inc_retry_interval = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.262403] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.db_max_retries = 20 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.262403] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.db_max_retry_interval = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.262403] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.db_retry_interval = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.262403] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.max_overflow = 50 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.262403] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.max_pool_size = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.262638] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.max_retries = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.262638] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.262699] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.262882] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.pool_timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.263075] env[61215]: DEBUG oslo_service.service [None 
req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.retry_interval = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.263249] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.slave_connection = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.263417] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] api_database.sqlite_synchronous = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.263596] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] devices.enabled_mdev_types = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.263781] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.263952] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.264135] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ephemeral_storage_encryption.enabled = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.264305] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.264479] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.api_servers = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.264646] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.264812] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.certfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.264980] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.collect_timing = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.265160] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.connect_retries = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.265323] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.connect_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.265489] env[61215]: DEBUG oslo_service.service [None 
req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.debug = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.265664] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.default_trusted_certificate_ids = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.265830] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.enable_certificate_validation = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.265985] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.enable_rbd_download = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.266163] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.endpoint_override = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.266335] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.insecure = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.266501] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.keyfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.266664] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.max_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.266825] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.min_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.266993] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.num_retries = 3 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.267183] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.rbd_ceph_conf = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.267352] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.rbd_connect_timeout = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.267528] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.rbd_pool = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.267694] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.rbd_user = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.267871] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.region_name = None {{(pid=61215) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.268060] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.retriable_status_codes = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.268234] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.service_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.268412] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.service_type = image {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.268615] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.split_loggers = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.268864] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.status_code_retries = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.269125] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.status_code_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.269372] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.269643] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.269927] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.verify_glance_signatures = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.270201] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] glance.version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.270458] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] guestfs.debug = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.270709] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] mks.enabled = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.271225] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.271505] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] image_cache.manager_interval = 2400 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
580.271758] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] image_cache.precache_concurrency = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.272026] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] image_cache.remove_unused_base_images = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.272287] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.272542] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.272807] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] image_cache.subdirectory_name = _base {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.273082] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.api_max_retries = 60 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.273338] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.api_retry_interval = 2 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.273583] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.auth_section = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.273827] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.auth_type = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.274068] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.274265] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.certfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.274444] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.collect_timing = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.274619] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.conductor_group = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.274785] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.connect_retries = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.274952] env[61215]: DEBUG 
oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.connect_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.275135] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.endpoint_override = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.275306] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.insecure = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.275471] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.keyfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.275651] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.max_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.275801] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.min_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.275972] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.peer_list = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.276148] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.region_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.276314] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.retriable_status_codes = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.276484] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.serial_console_state_timeout = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.276647] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.service_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.276823] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.service_type = baremetal {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.276990] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.shard = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.277177] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.split_loggers = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.277341] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.status_code_retries = None {{(pid=61215) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.277502] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.status_code_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.277662] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.277845] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.278046] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ironic.version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.278245] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.278426] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] key_manager.fixed_key = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.278617] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.278782] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.barbican_api_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.279017] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.barbican_endpoint = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.279305] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.barbican_endpoint_type = public {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.279494] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.barbican_region_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.279662] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.279826] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.certfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.279995] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.collect_timing = False {{(pid=61215) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.280179] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.insecure = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.280343] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.keyfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.280511] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.number_of_retries = 60 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.280677] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.retry_delay = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.280847] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.send_service_user_token = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.281018] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.split_loggers = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.281225] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.281445] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.verify_ssl = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.281658] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican.verify_ssl_path = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.281835] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican_service_user.auth_section = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.282013] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican_service_user.auth_type = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.282184] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican_service_user.cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.282345] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican_service_user.certfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.282512] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican_service_user.collect_timing = False {{(pid=61215) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.282676] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican_service_user.insecure = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.282873] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican_service_user.keyfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.283128] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican_service_user.split_loggers = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.283317] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] barbican_service_user.timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.283492] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.approle_role_id = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.283657] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.approle_secret_id = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.283821] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.283985] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.certfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.284172] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.collect_timing = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.284341] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.insecure = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.284502] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.keyfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.284679] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.kv_mountpoint = secret {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.284842] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.kv_path = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.285021] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.kv_version = 2 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.285187] env[61215]: DEBUG 
oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.namespace = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.285353] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.root_token_id = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.285521] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.split_loggers = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.285682] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.ssl_ca_crt_file = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.285844] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.286070] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.use_ssl = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.286188] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.286363] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.auth_section = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.286530] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.auth_type = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.286695] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.286856] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.certfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.287032] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.collect_timing = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.287204] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.connect_retries = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.287362] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.connect_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.287522] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.endpoint_override = None {{(pid=61215) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.287685] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.insecure = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.287843] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.keyfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.288034] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.max_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.288206] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.min_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.288367] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.region_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.288526] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.retriable_status_codes = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.288684] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.service_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.288865] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.service_type = identity {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.289082] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.split_loggers = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.289253] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.status_code_retries = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.289417] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.status_code_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.289580] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.289766] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.289933] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] keystone.version = None {{(pid=61215) log_opt_values 
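/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}

This option dump, above and below, is emitted by oslo.config: at service startup oslo.service asks ConfigOpts.log_opt_values() to log every registered option at DEBUG, rendering options marked secret as **** (see placement.password further down). A minimal sketch of that mechanism, using a hypothetical option set rather than nova's real registration code, assuming only that oslo.config is installed:

import logging

from oslo_config import cfg

LOG = logging.getLogger(__name__)

# Hypothetical stand-in options; nova registers its real groups
# (barbican, vault, keystone, libvirt, ...) through this same API.
opts = [
    cfg.StrOpt('cafile', help='PEM encoded CA bundle'),
    cfg.BoolOpt('insecure', default=False),
    cfg.StrOpt('password', secret=True),  # logged masked as ****
]
CONF = cfg.CONF
CONF.register_opts(opts, group='keystone')

logging.basicConfig(level=logging.DEBUG)
CONF([], project='demo')
# Emits one DEBUG line per registered option, e.g.
# "keystone.cafile = None", which is exactly the shape of the
# log_opt_values frames (cfg.py:2620) seen throughout this log.
CONF.log_opt_values(LOG, logging.DEBUG)

Running it prints lines of the form keystone.cafile = None, matching the frames that resume below with the libvirt group.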
[ 580.290155] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.connection_uri = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.290343] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.cpu_mode = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.290539] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.290720] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.cpu_models = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.290898] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.cpu_power_governor_high = performance {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.291087] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.291259] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.cpu_power_management = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.291436] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.291606] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.device_detach_attempts = 8 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.291772] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.device_detach_timeout = 20 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.291939] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.disk_cachemodes = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.292116] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.disk_prefix = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.292287] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.enabled_perf_events = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.292454] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.file_backed_memory = 0 {{(pid=61215) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.292620] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.gid_maps = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.292782] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.hw_disk_discard = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.292942] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.hw_machine_type = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.293133] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.images_rbd_ceph_conf = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.293307] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.293474] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.293646] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.images_rbd_glance_store_name = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.293826] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.images_rbd_pool = rbd {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.293995] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.images_type = default {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.294172] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.images_volume_group = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.294339] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.inject_key = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.294504] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.inject_partition = -2 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.294668] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.inject_password = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.294836] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.iscsi_iface = None {{(pid=61215) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.295012] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.iser_use_multipath = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.295188] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.295356] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.295521] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.live_migration_downtime = 500 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.295682] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.295849] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.296016] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.live_migration_inbound_addr = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.296194] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.296363] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.296527] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.live_migration_scheme = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.296707] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.live_migration_timeout_action = abort {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.296881] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.live_migration_tunnelled = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.297059] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.live_migration_uri = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.297231] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.297396] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.max_queues = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.297565] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.297805] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.298007] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.nfs_mount_options = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.298328] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.298506] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.298678] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.298844] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.299053] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.299230] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.num_pcie_ports = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.299403] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.299574] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.pmem_namespaces = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.299738] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.quobyte_client_cfg = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.300039] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.300229] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.300398] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.300567] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.300730] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.rbd_secret_uuid = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.300894] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.rbd_user = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.301076] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.301259] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.301424] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.rescue_image_id = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.301589] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.rescue_kernel_id = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.301752] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.rescue_ramdisk_id = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.301926] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.302109] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.rx_queue_size = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.302282] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.smbfs_mount_options = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.302558] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.302734] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.snapshot_compression = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.302903] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.snapshot_image_format = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.303143] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.303318] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.sparse_logical_volumes = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.303486] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.swtpm_enabled = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.303659] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.swtpm_group = tss {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.303830] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.swtpm_user = tss {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.304015] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.sysinfo_serial = unique {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.304189] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.tb_cache_size = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.304351] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.tx_queue_size = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.304518] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.uid_maps = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.304683] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.use_virtio_for_bridges = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.304858] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.virt_type = kvm {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.305039] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.volume_clear = zero 
{{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.305212] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.volume_clear_size = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.305381] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.volume_use_multipath = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.305544] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.vzstorage_cache_path = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.305717] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.305891] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.306071] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.306276] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.306523] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.306704] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.vzstorage_mount_user = stack {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.306875] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
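The libvirt.* dump that just ended is mostly nova defaults; the few deployment-specific values (virt_type, migration_inbound_addr, snapshots_directory) imply a nova.conf stanza like the sketch below. The file content is a hypothetical reconstruction, round-tripped through configparser purely for illustration; only the option values themselves are taken from the log:

from configparser import ConfigParser

# Reconstructed from the libvirt.* lines above; several of these match
# nova's defaults, so a real nova.conf would likely set only a few.
IMPLIED_NOVA_CONF = """
[libvirt]
virt_type = kvm
images_type = default
images_rbd_pool = rbd
migration_inbound_addr = 10.180.1.21
live_migration_downtime = 500
volume_clear = zero
wait_soft_reboot_seconds = 120
snapshots_directory = /opt/stack/data/nova/instances/snapshots
"""

parser = ConfigParser()
parser.read_string(IMPLIED_NOVA_CONF)
for key, value in parser.items('libvirt'):
    print(f'libvirt.{key} = {value}')  # mirrors the dump format above

[ 580.307069] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.auth_section = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.307255] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.auth_type = password {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.307418] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.307583] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.certfile = None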
{{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.307748] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.collect_timing = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.307935] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.connect_retries = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.308125] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.connect_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.308305] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.default_floating_pool = public {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.308468] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.endpoint_override = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.308681] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.extension_sync_interval = 600 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.308887] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.http_retries = 3 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.309075] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.insecure = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.309246] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.keyfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.309408] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.max_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.309580] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.309743] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.min_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.309914] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.ovs_bridge = br-int {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.310096] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.physnets = [] {{(pid=61215) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.310273] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.region_name = RegionOne {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.310435] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.retriable_status_codes = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.310609] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.service_metadata_proxy = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.310772] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.service_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.310944] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.service_type = network {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.311127] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.split_loggers = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.311291] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.status_code_retries = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.311451] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.status_code_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.311613] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.311795] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.311960] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] neutron.version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.312154] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] notifications.bdms_in_notifications = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.312337] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] notifications.default_level = INFO {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.312515] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] notifications.notification_format = unversioned {{(pid=61215) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.312684] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] notifications.notify_on_state_change = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.312861] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.313051] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] pci.alias = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.313230] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] pci.device_spec = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.313399] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] pci.report_in_placement = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.313574] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.auth_section = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.313751] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.auth_type = password {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.313920] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.314096] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.314259] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.certfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.314424] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.collect_timing = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.314583] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.connect_retries = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.314741] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.connect_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.314904] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.default_domain_id = None {{(pid=61215) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.315077] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.default_domain_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.315243] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.domain_id = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.315404] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.domain_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.315563] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.endpoint_override = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.315724] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.insecure = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.315884] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.keyfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.316055] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.max_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.316221] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.min_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.316393] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.password = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.316551] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.project_domain_id = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.316718] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.project_domain_name = Default {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.316897] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.project_id = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.317093] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.project_name = service {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.317270] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.region_name = RegionOne {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.317433] 
env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.retriable_status_codes = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.317595] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.service_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.317767] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.service_type = placement {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.317964] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.split_loggers = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.318146] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.status_code_retries = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.318314] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.status_code_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.318477] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.system_scope = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.318638] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.318798] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.trust_id = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.318985] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.user_domain_id = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.319181] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.user_domain_name = Default {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.319346] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.user_id = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.319522] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.username = placement {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.319705] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.319870] env[61215]: DEBUG oslo_service.service [None 
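req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] placement.version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}

The placement.* section that just completed is a standard keystoneauth1 credential block (auth_type = password, auth_url, project/user domains, valid_interfaces). Nova-style code turns such a section into an authenticated API client roughly as in this sketch; the config-file path is hypothetical, and keystoneauth1 plus oslo.config are assumed installed:

from keystoneauth1 import loading as ks_loading
from oslo_config import cfg

CONF = cfg.CONF

# Register the standard keystoneauth option set on [placement],
# the same option shape as the section dumped above.
ks_loading.register_auth_conf_options(CONF, 'placement')
ks_loading.register_session_conf_options(CONF, 'placement')
ks_loading.register_adapter_conf_options(CONF, 'placement')
CONF(['--config-file', 'nova.conf'])  # hypothetical path

auth = ks_loading.load_auth_from_conf_options(CONF, 'placement')
sess = ks_loading.load_session_from_conf_options(CONF, 'placement', auth=auth)
# valid_interfaces = ['internal', 'public'] above decides which
# catalog endpoint the adapter selects.
adapter = ks_loading.load_adapter_from_conf_options(CONF, 'placement', session=sess)
print(adapter.get('/').status_code)

The same loading helpers apply to the keystone, neutron, and service_user sections in this dump, which is why their option lists look identical.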
[ 580.320062] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] quota.cores = 20 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.320237] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] quota.count_usage_from_placement = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.320415] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.320595] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] quota.injected_file_content_bytes = 10240 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.320765] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] quota.injected_file_path_length = 255 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.320934] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] quota.injected_files = 5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.321115] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] quota.instances = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.321285] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] quota.key_pairs = 100 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.321452] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] quota.metadata_items = 128 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.321620] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] quota.ram = 51200 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.321789] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] quota.recheck_quota = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.321961] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] quota.server_group_members = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.322145] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] quota.server_groups = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.322321] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61215) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.322488] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.322650] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] scheduler.image_metadata_prefilter = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.322814] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.322979] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] scheduler.max_attempts = 3 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.323161] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] scheduler.max_placement_results = 1000 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.323328] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.323493] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.323657] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.323834] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] scheduler.workers = 2 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.324027] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.324209] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.324389] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.324562] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.324728] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.324897] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.325078] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.325276] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.325449] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.host_subset_size = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.325618] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.325785] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.325952] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.326137] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.isolated_hosts = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.326307] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.isolated_images = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.326472] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.326632] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.326807] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.326976] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.pci_in_placement = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.327160] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.327329] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.327495] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.327659] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.327826] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.328024] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.328206] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.track_instance_changes = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.328389] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
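Every record in this dump comes from the same call site, oslo.config's log_opt_values() (the cfg.py:2620 seen in each line), which the service invokes once at startup to log the effective value of every registered option, group by group. A minimal sketch of that mechanism, assuming only that oslo.config is installed; the three [filter_scheduler] options registered here are an illustrative subset, not Nova's full definitions:

```python
# Sketch: how oslo.config produces the "group.option = value" DEBUG lines
# above. The option subset is illustrative, not Nova's full registration.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

filter_scheduler_opts = [
    cfg.IntOpt('host_subset_size', default=1),
    cfg.FloatOpt('ram_weight_multiplier', default=1.0),
    cfg.ListOpt('enabled_filters', default=['ComputeFilter']),
]

CONF = cfg.CONF
CONF.register_opts(filter_scheduler_opts, group='filter_scheduler')
CONF([], project='nova')  # parse command line / config files (empty here)

# Walks every registered group and emits one line per option at the given
# level, masking secret options as **** (compare vmware.host_password below).
CONF.log_opt_values(LOG, logging.DEBUG)
```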
[ 580.328564] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] metrics.required = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.328733] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] metrics.weight_multiplier = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.328928] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.329136] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] metrics.weight_setting = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.329458] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.329640] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] serial_console.enabled = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.329824] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] serial_console.port_range = 10000:20000 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.330007] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.330193] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.330366] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] serial_console.serialproxy_port = 6083 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.330539] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] service_user.auth_section = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.330716] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] service_user.auth_type = password {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.330881] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] service_user.cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.331056] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] service_user.certfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.331230] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] service_user.collect_timing = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.331395] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] service_user.insecure = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.331558] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] service_user.keyfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.331731] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] service_user.send_service_user_token = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.331898] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] service_user.split_loggers = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.332075] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] service_user.timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
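The [service_user] block above (auth_type = password, send_service_user_token = True) is what lets the service attach a service token alongside the incoming user token when it calls other OpenStack APIs. A hedged sketch of how such a section is consumed with keystoneauth; the registration mirrors the options logged above, and the actual credentials would come from nova.conf rather than from code:

```python
# Sketch: turning a [service_user]-style config section into a keystoneauth
# auth plugin and session. Assumes keystoneauth1 and oslo.config installed.
from keystoneauth1 import loading
from oslo_config import cfg

CONF = cfg.CONF

# Registers the auth/session options seen above (auth_type, cafile,
# certfile, keyfile, insecure, timeout, collect_timing, split_loggers).
loading.register_auth_conf_options(CONF, 'service_user')
loading.register_session_conf_options(CONF, 'service_user')
CONF([], project='nova')

# With auth_type = password this yields a password-based auth plugin;
# the session wraps it for authenticated HTTP calls.
auth = loading.load_auth_from_conf_options(CONF, 'service_user')
sess = loading.load_session_from_conf_options(CONF, 'service_user', auth=auth)
```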
[ 580.332255] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] spice.agent_enabled = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.332435] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] spice.enabled = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.332751] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.332951] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.333142] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] spice.html5proxy_port = 6082 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.333314] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] spice.image_compression = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.333479] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] spice.jpeg_compression = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.333644] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] spice.playback_compression = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.333818] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] spice.server_listen = 127.0.0.1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.333990] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.334178] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] spice.streaming_mode = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.334327] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] spice.zlib_compression = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.334494] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] upgrade_levels.baseapi = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.334668] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] upgrade_levels.compute = auto {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.334830] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] upgrade_levels.conductor = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.334993] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] upgrade_levels.scheduler = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.335215] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.335393] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.335557] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vendordata_dynamic_auth.cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.335720] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vendordata_dynamic_auth.certfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.335886] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.336066] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vendordata_dynamic_auth.insecure = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.336233] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.336400] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.336563] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vendordata_dynamic_auth.timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.336737] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.api_retry_count = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.336904] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.ca_file = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.337095] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.337270] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.cluster_name = testcl1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.337439] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.connection_pool_size = 10 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.337602] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.console_delay_seconds = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.337773] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.datastore_regex = ^datastore.* {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.338029] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.338214] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.host_password = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.338388] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.host_port = 443 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.338561] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.host_username = administrator@vsphere.local {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.338734] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.insecure = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.338928] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.integration_bridge = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.339136] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.maximum_objects = 100 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.339307] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.pbm_default_policy = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.339476] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.pbm_enabled = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.339641] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.pbm_wsdl_location = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.339813] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.339978] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.serial_port_proxy_uri = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.340155] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.serial_port_service_uri = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.340328] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.task_poll_interval = 0.5 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.340507] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.use_linked_clone = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.340681] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.vnc_keymap = en-us {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.340850] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.vnc_port = 5900 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.341029] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vmware.vnc_port_total = 10000 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
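vmware.datastore_regex = ^datastore.* above restricts which datastores on cluster testcl1 the driver will consider for images and instance disks. The matching itself is ordinary Python re semantics; a standalone illustration with invented datastore names:

```python
# Illustration of how a datastore_regex like "^datastore.*" partitions
# datastore names. The names below are invented; the driver applies the
# pattern to the datastores visible on the configured cluster.
import re

datastore_regex = re.compile(r'^datastore.*')

for name in ['datastore1', 'datastore-ssd', 'nfs-gold', 'swap-ds']:
    verdict = 'eligible' if datastore_regex.match(name) else 'ignored'
    print(f'{name}: {verdict}')
# datastore1 and datastore-ssd match; nfs-gold and swap-ds are ignored.
```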
[ 580.341226] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vnc.auth_schemes = ['none'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.341404] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vnc.enabled = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.341698] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.341891] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.342083] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vnc.novncproxy_port = 6080 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.342267] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vnc.server_listen = 127.0.0.1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.342446] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.342612] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vnc.vencrypt_ca_certs = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.342778] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vnc.vencrypt_client_cert = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.342938] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vnc.vencrypt_client_key = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.343138] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.343309] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.disable_deep_image_inspection = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.343475] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.343640] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.343804] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.343968] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.disable_rootwrap = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.344182] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.enable_numa_live_migration = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.344313] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.344479] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.344642] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.344810] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.libvirt_disable_apic = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.344974] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.345157] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.345323] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.345490] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.345655] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.345822] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.345986] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.346165] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.346330] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.346498] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.346686] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.346856] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] wsgi.client_socket_timeout = 900 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.347046] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] wsgi.default_pool_size = 1000 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.347261] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] wsgi.keep_alive = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.347443] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] wsgi.max_header_line = 16384 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.347613] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.347779] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] wsgi.ssl_ca_file = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.347969] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] wsgi.ssl_cert_file = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.348162] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] wsgi.ssl_key_file = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.348336] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] wsgi.tcp_keepidle = 600 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.348514] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
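wsgi.wsgi_log_format above is a %-style template that is filled with per-request fields when an API request completes. Rendering it by hand shows the shape of the resulting access-log line; the field values here are invented:

```python
# Rendering the wsgi_log_format string above with %-style substitution.
# The request fields are invented for illustration.
fmt = ('%(client_ip)s "%(request_line)s" status: %(status_code)s '
       'len: %(body_length)s time: %(wall_seconds).7f')

print(fmt % {
    'client_ip': '127.0.0.1',
    'request_line': 'GET /v2.1/servers HTTP/1.1',
    'status_code': 200,
    'body_length': 1842,
    'wall_seconds': 0.0312456,
})
# 127.0.0.1 "GET /v2.1/servers HTTP/1.1" status: 200 len: 1842 time: 0.0312456
```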
[ 580.348684] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] zvm.ca_file = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.348851] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] zvm.cloud_connector_url = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.349172] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.349357] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] zvm.reachable_timeout = 300 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.349547] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_policy.enforce_new_defaults = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.349723] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_policy.enforce_scope = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.349904] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_policy.policy_default_rule = default {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.350100] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.350283] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_policy.policy_file = policy.yaml {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.350458] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.350622] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.351077] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.351077] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.351141] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
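With oslo_policy.enforce_new_defaults and enforce_scope both True, this deployment runs the new secure-RBAC policy defaults and loads rules from policy.yaml plus any overrides in policy.d. A sketch of the corresponding oslo.policy enforcer setup, assuming those [oslo_policy] options are already present in the parsed configuration; the rule name and credentials are purely illustrative:

```python
# Sketch: constructing an oslo.policy Enforcer the way a service would,
# given the oslo_policy options logged above. Rule and creds are invented.
from oslo_config import cfg
from oslo_policy import policy

CONF = cfg.CONF
CONF([], project='nova')

# Enforcer registers and reads the [oslo_policy] options: policy_file,
# policy_dirs, enforce_scope, enforce_new_defaults.
enforcer = policy.Enforcer(CONF)
enforcer.load_rules()

creds = {'roles': ['reader'], 'project_id': 'demo'}
print(enforcer.enforce('os_compute_api:servers:index',
                       {'project_id': 'demo'}, creds))
```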
[ 580.351277] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.351456] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.351632] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler.connection_string = messaging:// {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.351804] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler.enabled = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.351976] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler.es_doc_type = notification {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.352159] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler.es_scroll_size = 10000 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.352332] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler.es_scroll_time = 2m {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.352500] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler.filter_error_trace = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.352669] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler.hmac_keys = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.352838] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler.sentinel_service_name = mymaster {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.353042] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler.socket_timeout = 0.1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.353238] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler.trace_requests = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.353410] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler.trace_sqlalchemy = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.353598] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler_jaeger.process_tags = {} {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.353768] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler_jaeger.service_name_prefix = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.353936] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] profiler_otlp.service_name_prefix = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.354122] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] remote_debug.host = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.354288] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] remote_debug.port = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.354471] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.354639] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.354810] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.354976] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.355161] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.355330] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.355496] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.355663] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.355831] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.356016] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.356190] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.356367] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.356555] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.356753] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.356882] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.357086] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.357286] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.357470] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.357641] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.357807] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.358016] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.358209] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.358378] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.358550] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.358720] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.358911] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.359156] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.359340] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.359516] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.359688] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.ssl = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.359866] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.360063] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.360243] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.360419] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.360594] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 580.360762] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
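The oslo_messaging_rabbit group above tunes the kombu/RabbitMQ driver: a heartbeat every heartbeat_timeout_threshold / heartbeat_rate = 20 seconds, round-robin broker failover, reconnect retries starting at rabbit_retry_interval = 1s with backoff factor 2, and TLS disabled. A hedged sketch of how a service builds its RPC transport and client on top of that configuration; the transport URL and topic are illustrative, not taken from this deployment:

```python
# Sketch: obtaining an RPC transport/client with oslo.messaging. The
# [oslo_messaging_rabbit] options above are consumed by the rabbit driver
# underneath; URL and topic here are made up for the sketch.
from oslo_config import cfg
import oslo_messaging as messaging

CONF = cfg.CONF
CONF([], project='nova')

transport = messaging.get_rpc_transport(
    CONF, url='rabbit://stackrabbit:secret@devstack:5672/')
target = messaging.Target(topic='compute', version='5.0')
client = messaging.get_rpc_client(transport, target)
# client.call(ctxt, 'ping') would now round-trip through RabbitMQ.
```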
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.361141] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_notifications.retry = -1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.361328] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.361505] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.361680] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.auth_section = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.361847] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.auth_type = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.362125] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.cafile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.362177] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.certfile = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.362344] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.collect_timing = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.362507] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.connect_retries = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.362668] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.connect_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.362829] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.endpoint_id = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.362991] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.endpoint_override = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.363174] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.insecure = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.363336] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.keyfile = None {{(pid=61215) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.363498] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.max_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.363659] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.min_version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.363821] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.region_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.363984] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.retriable_status_codes = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.364164] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.service_name = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.364322] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.service_type = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.364485] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.split_loggers = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.364646] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.status_code_retries = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.364805] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.status_code_retry_delay = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.364963] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.timeout = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.365134] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.valid_interfaces = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.365292] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_limit.version = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.365459] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_reports.file_event_handler = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.365627] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61215) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.365790] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] oslo_reports.log_dir = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.365964] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.366142] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.366307] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.366475] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.366652] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.366849] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.366971] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.367145] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plug_ovs_privileged.group = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.367309] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.367478] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.367645] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.367808] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] vif_plug_ovs_privileged.user = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.368028] env[61215]: DEBUG oslo_service.service 
[None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.368215] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.368396] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.368571] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.368748] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.368947] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.369177] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.369358] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.369545] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.369724] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_ovs.isolate_vif = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.369906] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.370093] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.370275] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.370451] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
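The option dump above is produced by oslo.config: each os-vif plugin registers an option group with typed defaults, and on startup oslo.service walks every registered option and logs its effective value at DEBUG. Below is a minimal sketch of that mechanism, reusing the os_vif_ovs names and defaults shown in the log; the standalone scaffolding around it is illustrative rather than Nova's actual startup code.

import logging

from oslo_config import cfg

CONF = cfg.CONF
LOG = logging.getLogger(__name__)

# Group and defaults mirrored from the values logged above.
ovs_group = cfg.OptGroup('os_vif_ovs')
ovs_opts = [
    cfg.BoolOpt('isolate_vif', default=False),
    cfg.IntOpt('network_device_mtu', default=1500),
    cfg.IntOpt('ovs_vsctl_timeout', default=120),
    cfg.StrOpt('ovsdb_connection', default='tcp:127.0.0.1:6640'),
    cfg.StrOpt('ovsdb_interface', default='native'),
]
CONF.register_group(ovs_group)
CONF.register_opts(ovs_opts, group=ovs_group)

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    CONF([])  # parse an empty argv; a real service passes sys.argv and config files
    # This is the call that emits one "group.option = value" line per option.
    CONF.log_opt_values(LOG, logging.DEBUG)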
[ 580.370619] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_vif_ovs.per_port_bridge = False {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.370788] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_brick.lock_path = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.370974] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.371187] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.371365] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] privsep_osbrick.capabilities = [21] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.371531] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] privsep_osbrick.group = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.371693] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] privsep_osbrick.helper_command = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.371862] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.372048] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.372266] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] privsep_osbrick.user = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.372454] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.372620] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] nova_sys_admin.group = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.372784] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] nova_sys_admin.helper_command = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.372953] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
580.373138] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.373303] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] nova_sys_admin.user = None {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 580.373435] env[61215]: DEBUG oslo_service.service [None req-904263c2-ecf2-4e0a-8ec7-4ce2129d0c10 None None] ******************************************************************************** {{(pid=61215) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 580.373856] env[61215]: INFO nova.service [-] Starting compute node (version 0.1.0) [ 580.384224] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Getting list of instances from cluster (obj){ [ 580.384224] env[61215]: value = "domain-c8" [ 580.384224] env[61215]: _type = "ClusterComputeResource" [ 580.384224] env[61215]: } {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 580.385508] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c05b902-9b49-4cd9-9848-ca00023434b2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.394894] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Got total of 0 instances {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 580.395492] env[61215]: WARNING nova.virt.vmwareapi.driver [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 580.395950] env[61215]: INFO nova.virt.node [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Generated node identity 1329e087-aa78-44a2-9687-63a2b1b33fd5 [ 580.396195] env[61215]: INFO nova.virt.node [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Wrote node identity 1329e087-aa78-44a2-9687-63a2b1b33fd5 to /opt/stack/data/n-cpu-1/compute_id [ 580.410378] env[61215]: WARNING nova.compute.manager [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Compute nodes ['1329e087-aa78-44a2-9687-63a2b1b33fd5'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 580.447780] env[61215]: INFO nova.compute.manager [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 580.469567] env[61215]: WARNING nova.compute.manager [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
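The "Generated node identity" and "Wrote node identity" lines above mark the first-start path for node identity: the service looks for a persisted compute_id file under its state directory and only mints a fresh UUID when none exists, which is also why the surrounding ComputeHostNotFound warnings are expected on a first boot. A simplified sketch of that read-or-generate pattern, assuming a plain text file holding one UUID (Nova's real logic in nova.virt.node also covers upgrade and mismatch cases):

import uuid
from pathlib import Path

def get_local_node_uuid(state_path: str) -> str:
    """Return this node's stable identity, creating it on first start."""
    identity_file = Path(state_path) / 'compute_id'
    if identity_file.exists():
        # Every later restart reuses the persisted identity.
        return identity_file.read_text().strip()
    node_uuid = str(uuid.uuid4())  # first start on this host
    identity_file.write_text(node_uuid + '\n')
    return node_uuid

With a state path like /opt/stack/data/n-cpu-1, a second restart returns the same UUID, so the compute node record created below is found rather than regenerated.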
[ 580.470058] env[61215]: DEBUG oslo_concurrency.lockutils [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.470317] env[61215]: DEBUG oslo_concurrency.lockutils [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.470485] env[61215]: DEBUG oslo_concurrency.lockutils [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.470647] env[61215]: DEBUG nova.compute.resource_tracker [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 580.471722] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf47823f-e521-4354-a28b-63c71d9f34e4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.480281] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9778262-1d9b-4182-94b3-25934bd6f02b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.493951] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8753fb-2574-4f3c-8107-a5e9c177bd00 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.500029] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9559c4-cd38-4339-b0df-e7b8e72c1f3d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.529683] env[61215]: DEBUG nova.compute.resource_tracker [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181336MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 580.529893] env[61215]: DEBUG oslo_concurrency.lockutils [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 580.530151] env[61215]: DEBUG oslo_concurrency.lockutils [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 580.542062] env[61215]: WARNING 
nova.compute.resource_tracker [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] No compute node record for cpu-1:1329e087-aa78-44a2-9687-63a2b1b33fd5: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 1329e087-aa78-44a2-9687-63a2b1b33fd5 could not be found. [ 580.555608] env[61215]: INFO nova.compute.resource_tracker [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 1329e087-aa78-44a2-9687-63a2b1b33fd5 [ 580.607541] env[61215]: DEBUG nova.compute.resource_tracker [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 580.607759] env[61215]: DEBUG nova.compute.resource_tracker [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 580.712717] env[61215]: INFO nova.scheduler.client.report [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] [req-ec80f931-abfd-491d-afd1-3cadc0de6b77] Created resource provider record via placement API for resource provider with UUID 1329e087-aa78-44a2-9687-63a2b1b33fd5 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 580.729969] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de268475-eacd-4eb0-8b2f-fb186c3450f4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.737349] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99746b4a-f2aa-43fa-aa16-39fed7a7620b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.765866] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49717ebb-ba5f-47c3-b65c-439fbd3456b2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.772800] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205e5da9-989f-4b8c-a340-6cb430718099 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.785943] env[61215]: DEBUG nova.compute.provider_tree [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 580.825734] env[61215]: DEBUG nova.scheduler.client.report [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Updated inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 580.826054] env[61215]: DEBUG nova.compute.provider_tree [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Updating resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 generation from 0 to 1 during operation: update_inventory {{(pid=61215) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 580.826157] env[61215]: DEBUG nova.compute.provider_tree [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 580.874935] env[61215]: DEBUG nova.compute.provider_tree [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Updating resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 generation from 1 to 2 during operation: update_traits {{(pid=61215) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 580.892965] env[61215]: DEBUG nova.compute.resource_tracker [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 580.893181] env[61215]: DEBUG oslo_concurrency.lockutils [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.363s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.893342] env[61215]: DEBUG nova.service [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Creating RPC server for service compute {{(pid=61215) start /opt/stack/nova/nova/service.py:182}} [ 580.908515] env[61215]: DEBUG nova.service [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] Join ServiceGroup membership for this service compute {{(pid=61215) start /opt/stack/nova/nova/service.py:199}} [ 580.908691] env[61215]: DEBUG nova.servicegroup.drivers.db [None req-48654878-6ebb-48f5-9a85-3b3821956302 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61215) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 590.206117] env[61215]: DEBUG dbcounter [-] [61215] Writing DB stats nova_cell1:SELECT=1 {{(pid=61215) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 590.206832] env[61215]: DEBUG dbcounter [-] [61215] Writing DB stats nova_cell0:SELECT=1 {{(pid=61215) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 627.910817] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] 
Running periodic task ComputeManager._sync_power_states {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.921512] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Getting list of instances from cluster (obj){ [ 627.921512] env[61215]: value = "domain-c8" [ 627.921512] env[61215]: _type = "ClusterComputeResource" [ 627.921512] env[61215]: } {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 627.922620] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fad7ee-be86-4682-8bd5-48350f42e2af {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.932104] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Got total of 0 instances {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 627.932339] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.932651] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Getting list of instances from cluster (obj){ [ 627.932651] env[61215]: value = "domain-c8" [ 627.932651] env[61215]: _type = "ClusterComputeResource" [ 627.932651] env[61215]: } {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 627.933530] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df637423-f923-4bfe-bf7e-e4a25180a7fb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.941240] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Got total of 0 instances {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 636.665815] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.665815] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.665815] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 636.665815] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 636.677882] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}}
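Each "Running periodic task ComputeManager._..." line above is emitted by oslo.service's periodic task machinery: the manager class collects methods marked with the periodic_task decorator, and run_periodic_tasks() dispatches whichever are due on every tick. A minimal sketch of that wiring with stand-in task bodies (the real ComputeManager methods do the work named in the log, and their spacings come from nova.conf):

import time

from oslo_config import cfg
from oslo_service import periodic_task

class DemoManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(cfg.CONF)

    @periodic_task.periodic_task(spacing=600)
    def _sync_power_states(self, context):
        # In Nova this reconciles DB power state with the hypervisor's view.
        pass

    @periodic_task.periodic_task
    def _heal_instance_info_cache(self, context):
        # Default spacing: considered on every tick of the runner.
        pass

if __name__ == '__main__':
    manager = DemoManager()
    while True:
        idle = manager.run_periodic_tasks(context=None)  # seconds until next due task
        time.sleep(idle)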
[ 636.677882] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.677882] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.677882] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.677882] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.677882] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.678199] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.678199] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}}
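_reclaim_queued_deletes bails out immediately above because reclaim_instance_interval is not positive: soft delete is disabled, deletes are final, and there is never a queue to reclaim. With a positive interval, an instance is only reclaimed once it has been soft-deleted for at least that many seconds. A small sketch of the guard and the age test, with illustrative inputs (Nova reads soft-deleted instances from its database):

import datetime

def instances_to_reclaim(soft_deleted, reclaim_interval_s, now=None):
    """Yield instances whose soft-delete grace period has expired."""
    if reclaim_interval_s <= 0:
        return  # matches the "<= 0, skipping..." short-circuit in the log
    now = now or datetime.datetime.now(datetime.timezone.utc)
    for inst in soft_deleted:
        # Assumes each record carries a timezone-aware deleted_at timestamp.
        age = (now - inst['deleted_at']).total_seconds()
        if age >= reclaim_interval_s:
            yield inst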
[ 636.678199] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 636.688978] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.691016] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.691016] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.691016] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 636.691016] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbf8fe8-457e-4284-b2e4-766bc27fc606 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.700588] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3617723-7e04-46e2-bdc6-3a3ecd9529ce {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.714273] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c373b405-146e-4b9f-a732-67b00784854e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.722056] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b3909e-def3-457a-a63c-9e534fe6c1ec {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.748920] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181334MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 636.749266] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.750771] 
env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.781932] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 636.781932] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 636.795310] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6630ea98-5779-42f5-ab1b-8aa5598b7d1e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.803156] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea3945a-dc93-4998-99d8-396d7f04015e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.833103] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d96456-8407-48a6-b571-a7f938bf82b0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.840294] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534faeca-3ef4-43f3-b49c-6ef3de5caae2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.853983] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.862776] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 636.862776] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 636.862776] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.113s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.850250] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 696.859078] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 696.859461] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 696.859763] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 696.860061] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 697.654771] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 697.655024] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 698.654641] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 698.654985] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 698.654985] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 698.666971] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 698.667179] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 698.667341] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 698.667504] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 698.676659] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.676914] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.677093] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 698.677251] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 698.678338] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f79a25c-bc75-4cd0-9bd4-0ea915d9be07 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.687201] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1584423-ceda-44ed-98e3-cb3894981cfc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.701064] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27809a14-4bef-4264-ab2b-a1998ccc19d1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.707308] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127bae6f-ebbd-4bfe-b33e-4a0a22854156 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.736378] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181349MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 698.736517] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 698.736693] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 698.767613] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 698.767613] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 698.781026] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53129697-546a-4caa-95f0-e70343d1848e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.786596] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afc044f-0fe0-4273-81df-18c0f7608217 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.815869] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75522f0-b80e-4110-92b9-f7a0e08fe8cd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.822827] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516f0ae4-66b2-4785-8260-f67205a0c2e4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.835704] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.846507] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 698.846507] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 698.846507] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.109s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.833495] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.654714] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 757.654913] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 758.650941] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.653539] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.653695] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 758.653920] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 758.663017] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}}
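The Acquiring/acquired/released triplets around "compute_resources" throughout this log come from oslo.concurrency: the resource tracker runs its audit and claim paths under one named lock, and the lockutils wrapper logs how long each caller waited for the lock and then held it (the "waited 0.000s ... held 0.109s" figures). A minimal sketch of the same pattern; the function body is a stand-in, and the lock here is process-local, whereas external=True would take a file lock shared across processes:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_available_resource():
    # Runs with the "compute_resources" lock held, so a periodic audit and a
    # concurrent instance claim cannot interleave their resource updates.
    pass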
[ 758.663206] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.663359] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 758.663511] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 760.654659] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 760.655138] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 760.664987] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.665244] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.665384] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.665537] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 760.666667] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a675d4-bded-4577-b363-c5b609c0505c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.675764] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc3e750b-4bb6-498a-91e7-4437f099ab0f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.689339] env[61215]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504eb6f4-3c02-4326-8959-c775c8a7ce5b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.695470] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ad5e7d-4193-450f-ad2f-9b327b00a784 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.725204] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181335MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 760.725362] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.725521] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.755784] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 760.755966] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 760.768754] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e545c25-8a43-4e76-9ceb-214cb39be100 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.776343] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc95476-eed6-4d40-b884-a1aab688bb23 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.806115] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90453015-6a23-4ead-ac49-557aabfae0d7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.813273] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-457abacc-9117-4fcc-a8b1-4bba6c8dcd4c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.826137] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.834708] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 760.835895] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 760.836084] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.111s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.837600] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.654591] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.654812] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 818.654891] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 818.663939] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 818.664146] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 818.664315] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.654383] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.664362] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.664554] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.664695] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 820.659549] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 821.655062] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 821.664349] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.664647] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.664823] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.664986] env[61215]: DEBUG nova.compute.resource_tracker [None 
req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 821.666216] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13eb7ca-22ae-41d6-9899-afe2119ab2fc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.674740] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8aaa179-c0e4-493c-8605-f72839a96ca0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.688101] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c4a55a-384f-48f1-83b2-cec20f884119 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.694089] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225b572e-87a8-4828-ba46-dd48f41dd2d0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.722290] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181335MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 821.722458] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 821.722648] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.752203] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 821.752368] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 821.765123] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55bdcf53-7e3f-497a-81d5-3b96964433f8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.772352] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc627141-1d4e-4363-923c-c3afd8129287 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.802369] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34edb9ac-6f8c-4435-9816-13a9ee598e4d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.809502] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb02c74d-0802-4832-b5b5-1b5718b806f0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.822036] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.829684] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 821.830879] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 821.831082] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.108s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 822.831669] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.656826] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.656826] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11220}} [ 876.667836] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] There are 0 instances to clean {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 876.669344] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61215) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.669344] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61215) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11258}} [ 876.676708] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 878.683466] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 878.683879] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 878.683879] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 878.692458] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 879.654216] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.654607] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.654965] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.654965] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.655143] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 880.655289] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 882.651019] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.655386] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.655721] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 883.665581] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.665820] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.666011] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.666178] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 883.667274] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167a0111-6a5b-47bb-b6bf-16e267964259 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.676631] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1bc75b-4f49-4d51-968a-9d3abd9bbfaa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.690333] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb1e695-57c3-4fc7-ab2f-e400465f72e3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.696532] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a6b71d-242c-4b32-9081-7e5efe90cd37 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.724726] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181332MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 883.724865] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.725058] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.784055] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 883.784164] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 883.800060] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing inventories for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 883.812766] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating ProviderTree inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 883.812943] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 883.824239] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing aggregate associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, aggregates: None {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 
883.839728] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing trait associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 883.850468] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44356abf-755e-40c8-a937-b696887ea860 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.857783] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894ec528-1341-463c-8f12-378a64379adb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.887810] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250086b4-be48-4c9c-a225-3a41840b19dd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.896358] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464791db-3a57-4c26-af08-f4b599176600 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.909011] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 883.918217] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 883.919389] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 883.919562] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.195s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 938.918747] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 938.919179] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal 
instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 938.919179] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 938.928255] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 940.653869] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.664213] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.664405] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.664546] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 941.654635] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 942.650928] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 942.653573] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 942.653727] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 944.655154] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 944.655518] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 944.665061] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.665286] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.665457] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.665626] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 944.666735] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f745ba-0df3-48b6-b5d7-f17d50ef8e63 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.675435] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9ad458-83c4-41cc-bdb7-d1670bc0972d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.689724] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c17071-66db-4711-8d75-6baefff1533c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.695887] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c834a6-fd3d-45a1-975c-984c15b5508b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.724012] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181337MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 944.724183] env[61215]: DEBUG 
oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.724377] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.755206] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 944.755410] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 944.769772] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63bd6df-ff5b-46fd-bcb5-fd96dd836d45 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.777151] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41af2eb-874c-47ce-b0c6-9796fc091bdf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.807327] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facab1ef-6d63-416b-b379-d52a533de07f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.814521] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cc97a9-150c-44c4-86be-5f9799499c03 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.827128] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.835596] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 944.836767] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 944.836941] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.113s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.836243] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1000.836654] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1000.836654] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1000.845715] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1001.654110] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.654348] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.654501] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1002.654949] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.650287] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1004.655026] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1004.656879] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1005.654577] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.654986] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1006.665452] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.665663] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.665831] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.665989] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1006.667122] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6ba62c0-59e6-419b-92f4-afd8f4219273 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.675913] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09fd4c85-3398-4f0c-a582-b6200c16f299 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.689367] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101adde4-a1ce-4347-8e22-8e17b98db304 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.695447] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc77186f-051d-4953-ac3e-613d845f2a55 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.724434] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181326MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1006.724576] env[61215]: DEBUG 
oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.724765] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.755517] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1006.755686] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1006.769422] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cefe5107-438d-4923-b4e9-048391149807 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.776741] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed4ebb63-a0ef-4d60-a42c-4787a876bd92 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.806182] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b227fe-6be0-4856-8b21-ed1756854986 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.813388] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be0538a-c63e-46bb-967f-7a506be5e299 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.826264] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.833889] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1006.835137] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1006.835316] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.111s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1061.835225] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1061.835502] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1061.835502] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1061.844393] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1061.844579] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1063.655042] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1063.655420] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1064.654418] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1065.650184] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1065.650545] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1065.660454] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.654486] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.654842] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1066.654884] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1066.666790] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.667016] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.667188] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.667342] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1066.668457] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec21550-5526-4ffa-8cd8-9afdafc36ca3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.677014] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-156bb997-33c6-4fd0-87e0-bbeacabd4748 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.691395] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cab20e8-1462-4c14-9410-99684871de38 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.697699] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ba8de2-cbeb-423b-a91b-ed4b3564ae54 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.726490] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181317MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1066.726673] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.726822] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.758728] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1066.758903] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1066.772873] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcbb9021-1bb0-4f36-949d-a73d70bcb695 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.780356] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a726ee-628b-47dd-95a0-2c72e1562325 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.809466] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e815ec-e7a0-4359-b867-9dfa3ee4e483 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.816947] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc8bebc-fb75-48fb-a743-8823c0cffcba {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.829771] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.837725] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1066.838892] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1066.839085] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.112s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1121.839342] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1121.839738] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}}
[ 1121.839738] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1121.848409] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}}
[ 1123.655069] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1124.654637] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1125.650647] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1125.653328] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1126.654138] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1126.654576] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1126.654576] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1126.664531] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1126.664729] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1126.664893] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1126.665062] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1126.666301] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81649afc-30dc-44ed-ae57-f7e6d80779a4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1126.674882] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffaeaebe-2671-4003-9193-29ecaddd1d2a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1126.688605] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9bf7c2-8960-4e12-8a61-e5ce67a2ddf2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1126.694565] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d8c7fe-756d-4631-927b-f0988c8ca4d4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1126.722223] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181314MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1126.722396] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1126.722545] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1126.754197] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1126.754197] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1126.766053] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147e03a6-bcfa-4351-8a40-e2549977014f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1126.773516] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d76a2d9-fe94-4ed4-a3f0-81944e40db87 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1126.803271] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e5b809-5eb0-41b8-8f5c-803374a050df {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1126.810731] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65ce183-218c-4a50-bb02-d12d4604641b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1126.823650] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1126.832301] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1126.833503] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1126.833669] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.111s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1127.834288] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1127.834647] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}}
[ 1181.655770] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1181.655770] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61215) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11258}}
[ 1181.664568] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1183.672743] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1183.672743] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}}
[ 1183.672743] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1183.680967] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}}
[ 1184.654473] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1185.650645] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1186.649208] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1186.659290] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1186.659661] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1186.659661] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1187.654736] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1188.655201] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1188.655579] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}}
[ 1188.655579] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1188.665171] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1188.665382] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1188.665549] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1188.665700] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1188.666796] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751aec2a-d207-43dc-bb34-fcc53105c6f2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1188.675465] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1734d04-2c15-4b09-b2f7-3a52564421a9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1188.688967] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1191a55d-991d-4387-a5e7-5947e2092e0a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1188.695040] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714758ba-f4d4-4fbe-8df7-2bbc054b069f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1188.723169] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181311MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1188.723310] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1188.723499] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1188.820343] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1188.820526] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1188.837098] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing inventories for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}}
[ 1188.849041] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating ProviderTree inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}}
[ 1188.849219] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 1188.858590] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing aggregate associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, aggregates: None {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}}
[ 1188.873402] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing trait associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}}
[ 1188.884627] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfdda01-d156-41c9-9ffc-75c693c670ef {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1188.891834] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294edb63-ad30-42f2-bcce-89c14c6d2794 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1188.921571] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2386aca8-1068-4804-a114-533e578bdfca {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1188.928862] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69dbc2cb-8b8d-45f8-b479-2ae0f3185030 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1188.941613] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1188.950529] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1188.951717] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1188.951900] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.228s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1189.654331] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1189.654596] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11220}}
[ 1189.666904] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] There are 0 instances to clean {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}}
[ 1227.925602] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1227.934918] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Getting list of instances from cluster (obj){
[ 1227.934918] env[61215]: value = "domain-c8"
[ 1227.934918] env[61215]: _type = "ClusterComputeResource"
[ 1227.934918] env[61215]: } {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 1227.935722] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d000ed-bf0e-4b43-b2c7-4c50981be481 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1227.944636] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Got total of 0 instances {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 1244.374846] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Acquiring lock "2a605c78-bef9-44f0-bcd0-e5bd3e1b0028" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1244.375170] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Lock "2a605c78-bef9-44f0-bcd0-e5bd3e1b0028" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1244.406678] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Acquiring lock "1eaf05ba-8235-4a68-b807-db95e65c0933" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1244.407013] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Lock "1eaf05ba-8235-4a68-b807-db95e65c0933" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1244.416113] env[61215]: DEBUG nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1244.433339] env[61215]: DEBUG nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1244.559484] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1244.559937] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1244.561736] env[61215]: INFO nova.compute.claims [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1244.583474] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1244.710935] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415c1361-337f-4a90-8830-07073f9f04a7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1244.720325] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9a1e76-dacd-4e88-b5ad-25ca95cb5470 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1244.760852] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e245911-7957-498c-8931-9985ff034187 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1244.771178] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f29ee0b-18cb-4927-9457-42090347879f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1244.789752] env[61215]: DEBUG nova.compute.provider_tree [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1244.809765] env[61215]: DEBUG nova.scheduler.client.report [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1244.843271] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.281s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1244.843271] env[61215]: DEBUG nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1244.847280] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.263s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1244.847864] env[61215]: INFO nova.compute.claims [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1244.912533] env[61215]: DEBUG nova.compute.utils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1244.914790] env[61215]: DEBUG nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1244.914790] env[61215]: DEBUG nova.network.neutron [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1244.933847] env[61215]: DEBUG nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1245.023054] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd0fc11-dcb2-46af-9b19-f7e067fdba67 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1245.035405] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa7f3cd-73a7-4895-98db-8044729679ea {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1245.070769] env[61215]: DEBUG nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1245.072621] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d81a8b-fe5d-4ee0-b3e5-c3ebcaea76a5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1245.081199] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237bda8a-ffa9-4428-9044-baec3b31237c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1245.098993] env[61215]: DEBUG nova.compute.provider_tree [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1245.112332] env[61215]: DEBUG nova.scheduler.client.report [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1245.131027] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.283s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1245.131027] env[61215]: DEBUG nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1245.189331] env[61215]: DEBUG nova.compute.utils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1245.193806] env[61215]: DEBUG nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1245.194124] env[61215]: DEBUG nova.network.neutron [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1245.217302] env[61215]: DEBUG nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1245.328643] env[61215]: DEBUG nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1245.673899] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1245.673899] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}}
[ 1245.674951] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1245.691541] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 1245.691541] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 1245.691541] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}}
[ 1245.692687] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1245.739295] env[61215]: DEBUG nova.virt.hardware [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1245.739415] env[61215]: DEBUG nova.virt.hardware [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1245.740602] env[61215]: DEBUG nova.virt.hardware [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1245.740602] env[61215]: DEBUG nova.virt.hardware [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1245.740811] env[61215]: DEBUG nova.virt.hardware [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1245.741032] env[61215]: DEBUG nova.virt.hardware [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1245.741677] env[61215]: DEBUG nova.virt.hardware [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1245.741776] env[61215]: DEBUG nova.virt.hardware [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1245.741927] env[61215]: DEBUG nova.virt.hardware [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1245.743707] env[61215]: DEBUG nova.virt.hardware [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1245.743707] env[61215]: DEBUG nova.virt.hardware [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1245.746194] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b73153-f90f-4134-96d4-489b6a196a0d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1245.759716] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbea187-6459-483e-be3b-1f344986c4e2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1245.766141] env[61215]: DEBUG nova.virt.hardware [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1245.766418] env[61215]: DEBUG nova.virt.hardware [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1245.766591] env[61215]: DEBUG nova.virt.hardware [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1245.766793] env[61215]: DEBUG nova.virt.hardware [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1245.766941] env[61215]: DEBUG nova.virt.hardware [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1245.767100] env[61215]: DEBUG nova.virt.hardware [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1245.767433] env[61215]: DEBUG nova.virt.hardware [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1245.767504] env[61215]: DEBUG nova.virt.hardware [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1245.767624] env[61215]: DEBUG nova.virt.hardware [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1245.767785] env[61215]: DEBUG nova.virt.hardware [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1245.767961] env[61215]: DEBUG nova.virt.hardware [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1245.769137] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b602f3e-8161-4830-a9ae-bbfd4cbc5e7f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.788940] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3480f8c8-c319-49e1-a511-0dd7d41ec82c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.813015] env[61215]: DEBUG nova.policy [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Policy check for network:attach_external_network 
failed with credentials {'is_admin': False, 'user_id': '36302aa8bc2745a0b7952ee6c74050bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68fd74a5960e4273bc406809c227d1a8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1245.825677] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81e0bf5-0ea9-4842-ab74-9a0feb5d3c51 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.938266] env[61215]: DEBUG nova.policy [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b959d559552d400282880d1b207e61e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a867bdfa83f9414b9e0b83b27a34b51c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1246.547469] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Acquiring lock "351c2ada-945a-4f0b-8fa9-47e3412c5e05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.547913] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Lock "351c2ada-945a-4f0b-8fa9-47e3412c5e05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.563662] env[61215]: DEBUG nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1246.648140] env[61215]: DEBUG nova.network.neutron [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Successfully created port: 94e9036f-7fb6-4e92-a896-364ddcc37e55 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1246.660016] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1246.660016] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.660016] env[61215]: INFO nova.compute.claims [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1246.668084] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.811028] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8860cdd8-ee6a-4475-899c-1b8b3b27af88 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.820123] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8462918-478f-4225-bad4-dec55fd31133 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.854851] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202a49b4-4be5-450a-844a-bb7590c9444b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.867117] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34a13ca-1dd8-40c9-9557-7745f987d283 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.881754] env[61215]: DEBUG nova.compute.provider_tree [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1246.897209] env[61215]: DEBUG nova.scheduler.client.report [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e 
tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1246.927146] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.267s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.927146] env[61215]: DEBUG nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1246.982244] env[61215]: DEBUG nova.compute.utils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1246.986756] env[61215]: DEBUG nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1246.987032] env[61215]: DEBUG nova.network.neutron [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1247.003755] env[61215]: DEBUG nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1247.130676] env[61215]: DEBUG nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Start spawning the instance on the hypervisor. 
[ 1247.154873] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "d2756d6f-d1f6-4408-83a6-3cbae8bf8b04" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1247.155149] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "d2756d6f-d1f6-4408-83a6-3cbae8bf8b04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1247.167228] env[61215]: DEBUG nova.virt.hardware [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=<?>,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-01T03:04:46Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1247.167482] env[61215]: DEBUG nova.virt.hardware [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1247.167733] env[61215]: DEBUG nova.virt.hardware [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1247.168084] env[61215]: DEBUG nova.virt.hardware [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1247.168210] env[61215]: DEBUG nova.virt.hardware [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1247.168372] env[61215]: DEBUG nova.virt.hardware [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1247.168706] env[61215]: DEBUG nova.virt.hardware [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1247.168771] env[61215]: DEBUG nova.virt.hardware [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1247.168907] env[61215]: DEBUG nova.virt.hardware [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1247.169077] env[61215]: DEBUG nova.virt.hardware [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1247.169264] env[61215]: DEBUG nova.virt.hardware [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1247.171396] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-452fb2c3-6a2e-4377-afc5-7e4918bdb597 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1247.175472] env[61215]: DEBUG nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
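
The nova.virt.hardware records above trace the CPU topology search for the m1.nano flavor: with vcpus=1 and no flavor or image constraints, preferences stay at 0:0:0, limits default to 65536, and the only (sockets, cores, threads) triple whose product is 1 is 1:1:1. A rough standalone rendering of that enumeration, as a sketch rather than Nova's actual code:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate (sockets, cores, threads) triples whose product equals the
        # vCPU count and that respect the per-dimension limits, mirroring the
        # "Build topologies ... Got N possible topologies" records above.
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // sockets // cores
                if threads <= max_threads:
                    found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))  # [(1, 1, 1)], matching the log
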
[ 1247.183957] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa90ced-8459-4178-a352-a7e9a0ac9beb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1247.247758] env[61215]: DEBUG nova.policy [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0c650f770f04ec8b0f21425842c5db5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ae8261b5f16427d8c5dd6e205f5b317', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1247.262639] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1247.262888] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1247.265305] env[61215]: INFO nova.compute.claims [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1247.432727] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fea4d28-d2e4-45e3-bc57-8471d25629d4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1247.440727] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c9380c-6f95-4c74-a1d3-7ec3aec51284 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1247.444696] env[61215]: DEBUG nova.network.neutron [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Successfully created port: a6abc627-04a4-4166-b281-cdef37bbea65 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1247.475750] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c2a0b5-735d-4275-8dba-9cc33e42bf7f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1247.484326] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d86e1b2-11bd-46d0-be84-4736b342ff81 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1247.499221] env[61215]: DEBUG nova.compute.provider_tree [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1247.514254] env[61215]: DEBUG nova.scheduler.client.report [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1247.543381] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.280s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1247.544306] env[61215]: DEBUG nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1247.584903] env[61215]: DEBUG nova.compute.utils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1247.586271] env[61215]: DEBUG nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
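
The inventory dict repeated in these report.py records is what the resource tracker pushes to the Placement service; the capacity the scheduler actually works with is (total - reserved) * allocation_ratio per resource class. Worked through for the values above (a sketch of the arithmetic, not Placement's code):

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
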
[ 1247.588717] env[61215]: DEBUG nova.network.neutron [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1247.597831] env[61215]: DEBUG nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1247.658221] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1247.692993] env[61215]: DEBUG nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1247.725746] env[61215]: DEBUG nova.virt.hardware [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=<?>,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-01T03:04:46Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1247.725746] env[61215]: DEBUG nova.virt.hardware [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1247.725746] env[61215]: DEBUG nova.virt.hardware [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1247.726148] env[61215]: DEBUG nova.virt.hardware [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1247.726148] env[61215]: DEBUG nova.virt.hardware [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1247.726148] env[61215]: DEBUG nova.virt.hardware [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1247.726601] env[61215]: DEBUG nova.virt.hardware [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1247.726816] env[61215]: DEBUG nova.virt.hardware [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1247.727022] env[61215]: DEBUG nova.virt.hardware [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1247.727203] env[61215]: DEBUG nova.virt.hardware [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1247.727418] env[61215]: DEBUG nova.virt.hardware [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1247.728735] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce2edc5-a4c0-4fb8-ba4c-3a2da471a6de {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1247.740870] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc00e3cc-33c5-47af-8456-ec36429a7953 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1247.805615] env[61215]: DEBUG nova.policy [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '945e16883d9746eb9f07beca0bdd6c93', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '69b85dab58ff4d1780ed5ec65c1ff99c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}}
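
The nova.policy records show a plain member token failing network:attach_external_network, an admin-only check, so each build proceeds without that permission rather than erroring out. The check itself is ordinary oslo.policy evaluation; a self-contained approximation (the rule string here is an assumption, not copied from Nova's defaults):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "is_admin:True"))

    # Credentials shaped like the log's dict: member role, not admin.
    creds = {"roles": ["reader", "member"], "is_admin": False}
    print(enforcer.authorize("network:attach_external_network", {}, creds))  # False
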
[ 1248.163825] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Acquiring lock "805748d7-e459-4608-a02d-05ac56c48290" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1248.163825] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Lock "805748d7-e459-4608-a02d-05ac56c48290" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1248.180942] env[61215]: DEBUG nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1248.270723] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1248.270989] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1248.273076] env[61215]: INFO nova.compute.claims [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1248.422787] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a4715b-f56d-49a3-a7c2-7803a1153e24 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1248.432773] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76292704-5b77-4597-b4e7-e1248b4526b6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1248.468859] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff993cd-c4aa-4e11-96e9-4e9470fa9fef {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1248.477008] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c48492-14e0-4d3e-9c7d-8e132c7039a4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1248.493595] env[61215]: DEBUG nova.compute.provider_tree [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1248.505646] env[61215]: DEBUG nova.scheduler.client.report [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1248.532421] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.261s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1248.532969] env[61215]: DEBUG nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1248.580997] env[61215]: DEBUG nova.compute.utils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1248.583260] env[61215]: DEBUG nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1248.583415] env[61215]: DEBUG nova.network.neutron [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1248.596597] env[61215]: DEBUG nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1248.654707] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1248.655324] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1248.655562] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1248.655770] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}}
[ 1248.683306] env[61215]: DEBUG nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1248.708441] env[61215]: DEBUG nova.virt.hardware [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=<?>,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-01T03:04:46Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1248.708708] env[61215]: DEBUG nova.virt.hardware [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1248.708884] env[61215]: DEBUG nova.virt.hardware [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1248.709020] env[61215]: DEBUG nova.virt.hardware [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1248.709170] env[61215]: DEBUG nova.virt.hardware [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1248.709397] env[61215]: DEBUG nova.virt.hardware [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1248.710086] env[61215]: DEBUG nova.virt.hardware [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1248.710265] env[61215]: DEBUG nova.virt.hardware [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1248.710436] env[61215]: DEBUG nova.virt.hardware [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1248.710599] env[61215]: DEBUG nova.virt.hardware [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1248.711648] env[61215]: DEBUG nova.virt.hardware [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1248.712797] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9123ac57-11c0-433d-b9d0-d06a229d1649 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1248.723667] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d976c8c-b9f2-44f7-a6d6-434cc58c2fe3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1248.777129] env[61215]: DEBUG nova.policy [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bd2a25631b3c448993b3648a4a8cc854', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53fec98c0f69472497a1245f69c6b238', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}}
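
The oslo_service.periodic_task records interleaved here are the compute manager's timer loop; _reclaim_queued_deletes bails out immediately because reclaim_instance_interval defaults to 0 (deferred delete disabled). The tasks are declared with a decorator, roughly like this (class name and spacing are illustrative, not Nova's code):

    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60)
        def _poll_volume_usage(self, context):
            # Invoked every ~60s from the service's periodic loop; each run
            # produces a "Running periodic task ..." DEBUG record like those above.
            pass
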
[ 1248.946279] env[61215]: DEBUG nova.network.neutron [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Successfully created port: 65b64270-4c6d-478e-bd7c-0fed0f33fc2f {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1249.187790] env[61215]: DEBUG nova.network.neutron [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Successfully created port: fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1249.372197] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Acquiring lock "ef0f6995-b272-4a45-a09d-5d8d38ffe23c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1249.372435] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Lock "ef0f6995-b272-4a45-a09d-5d8d38ffe23c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1249.399982] env[61215]: DEBUG nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}}
[ 1249.484071] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1249.488192] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1249.489799] env[61215]: INFO nova.compute.claims [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 1249.656225] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1249.693716] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306ff677-a9ce-4733-bdd4-6572838797f0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1249.706471] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a5710a-cd0e-439b-a696-06b205072f92 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1249.738797] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc98a9c-d32e-47cd-b61c-4f7d31e9850c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1249.747121] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4be6c5c-3a3c-45b0-b7d6-9fa3386e9505 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1249.762805] env[61215]: DEBUG nova.compute.provider_tree [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1249.771486] env[61215]: DEBUG nova.scheduler.client.report [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1249.795135] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.311s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1249.796683] env[61215]: DEBUG nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1249.832987] env[61215]: DEBUG nova.compute.utils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1249.834773] env[61215]: DEBUG nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1249.834981] env[61215]: DEBUG nova.network.neutron [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1249.851054] env[61215]: DEBUG nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1249.963725] env[61215]: DEBUG nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1250.008710] env[61215]: DEBUG nova.virt.hardware [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=<?>,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-01T03:04:46Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1250.009041] env[61215]: DEBUG nova.virt.hardware [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1250.009201] env[61215]: DEBUG nova.virt.hardware [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1250.009982] env[61215]: DEBUG nova.virt.hardware [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1250.009982] env[61215]: DEBUG nova.virt.hardware [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1250.009982] env[61215]: DEBUG nova.virt.hardware [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1250.009982] env[61215]: DEBUG nova.virt.hardware [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1250.010437] env[61215]: DEBUG nova.virt.hardware [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1250.010437] env[61215]: DEBUG nova.virt.hardware [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1250.010544] env[61215]: DEBUG nova.virt.hardware [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1250.010691] env[61215]: DEBUG nova.virt.hardware [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1250.013864] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d98ef1-240e-48bf-ba4a-0d9dfd8f6973 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1250.023798] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7808dd-5d9a-4b40-97a4-37e4534b94b2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1250.414934] env[61215]: DEBUG nova.policy [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ec99e8ac9a940bc880e22a8cc408cb0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '074004f542ab46afb2abc965bdc00892', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1250.578834] env[61215]: DEBUG nova.network.neutron [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Successfully created port: 81e39a76-5d88-49b2-8442-77cf90acfb2f {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1250.656012] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1250.669632] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1250.669852] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1250.670203] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1250.670282] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1250.671591] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08eb9ebe-4be2-450c-b103-c12624ae5f8e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1250.687671] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e66541-144c-4c01-834b-e9abb7842432 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1250.707922] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d48c815-255e-4739-a832-fa7538049027 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1250.713343] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109fee5f-ebcd-46ba-9831-93467072f6dc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1250.745745] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181308MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1250.745989] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1250.746263] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1250.820556] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1250.820707] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 1eaf05ba-8235-4a68-b807-db95e65c0933 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1250.820860] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 351c2ada-945a-4f0b-8fa9-47e3412c5e05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1250.820989] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d2756d6f-d1f6-4408-83a6-3cbae8bf8b04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1250.821120] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 805748d7-e459-4608-a02d-05ac56c48290 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1250.821238] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ef0f6995-b272-4a45-a09d-5d8d38ffe23c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1250.821423] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1250.822040] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1250.961709] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f673b1-344e-471e-91a0-dceb3a6a067d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1250.974746] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3dabd9-5e3a-4d0e-a026-321bc336f4ef {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1251.013152] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a2a768-5a28-4ec3-9624-413eaf3e5c08 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1251.023399] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf99409-140e-44ea-8a99-f8be20dbb777 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1251.046549] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1251.058669] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1251.080688] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1251.080923] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.335s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1251.673143] env[61215]: DEBUG nova.network.neutron [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Successfully updated port: a6abc627-04a4-4166-b281-cdef37bbea65 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
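
The audit's arithmetic is consistent with the six m1.nano claims above: used_ram = 512MB reserved + 6 x 128MB = 1280MB, used_disk = 6 x 1GB = 6GB, used_vcpus = 6 x 1 = 6, against 48 x 4.0 = 192 schedulable vCPUs. As a quick check with the values from the records:

    instances = 6          # m1.nano claims tracked in the audit above
    flavor = {"memory_mb": 128, "root_gb": 1, "vcpus": 1}
    reserved_ram_mb = 512  # reserved host memory from the inventory records

    used_ram = reserved_ram_mb + instances * flavor["memory_mb"]  # 1280 MB
    used_disk = instances * flavor["root_gb"]                     # 6 GB
    used_vcpus = instances * flavor["vcpus"]                      # 6
    print(used_ram, used_disk, used_vcpus)
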
req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Successfully updated port: a6abc627-04a4-4166-b281-cdef37bbea65 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1251.691079] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Acquiring lock "refresh_cache-1eaf05ba-8235-4a68-b807-db95e65c0933" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1251.691585] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Acquired lock "refresh_cache-1eaf05ba-8235-4a68-b807-db95e65c0933" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.691788] env[61215]: DEBUG nova.network.neutron [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1251.911110] env[61215]: DEBUG nova.network.neutron [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1252.071149] env[61215]: DEBUG nova.network.neutron [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Successfully updated port: 94e9036f-7fb6-4e92-a896-364ddcc37e55 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1252.093899] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Acquiring lock "refresh_cache-2a605c78-bef9-44f0-bcd0-e5bd3e1b0028" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1252.093899] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Acquired lock "refresh_cache-2a605c78-bef9-44f0-bcd0-e5bd3e1b0028" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.093899] env[61215]: DEBUG nova.network.neutron [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1252.330757] env[61215]: DEBUG nova.network.neutron [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 
tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1252.693234] env[61215]: DEBUG nova.compute.manager [req-9ad39f39-3c6e-4284-8ed3-5645315f1b8b req-9abe6c56-1262-4496-901b-c3c55dfb4af0 service nova] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Received event network-vif-plugged-a6abc627-04a4-4166-b281-cdef37bbea65 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1252.693234] env[61215]: DEBUG oslo_concurrency.lockutils [req-9ad39f39-3c6e-4284-8ed3-5645315f1b8b req-9abe6c56-1262-4496-901b-c3c55dfb4af0 service nova] Acquiring lock "1eaf05ba-8235-4a68-b807-db95e65c0933-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.693234] env[61215]: DEBUG oslo_concurrency.lockutils [req-9ad39f39-3c6e-4284-8ed3-5645315f1b8b req-9abe6c56-1262-4496-901b-c3c55dfb4af0 service nova] Lock "1eaf05ba-8235-4a68-b807-db95e65c0933-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.693234] env[61215]: DEBUG oslo_concurrency.lockutils [req-9ad39f39-3c6e-4284-8ed3-5645315f1b8b req-9abe6c56-1262-4496-901b-c3c55dfb4af0 service nova] Lock "1eaf05ba-8235-4a68-b807-db95e65c0933-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.693630] env[61215]: DEBUG nova.compute.manager [req-9ad39f39-3c6e-4284-8ed3-5645315f1b8b req-9abe6c56-1262-4496-901b-c3c55dfb4af0 service nova] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] No waiting events found dispatching network-vif-plugged-a6abc627-04a4-4166-b281-cdef37bbea65 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1252.694204] env[61215]: WARNING nova.compute.manager [req-9ad39f39-3c6e-4284-8ed3-5645315f1b8b req-9abe6c56-1262-4496-901b-c3c55dfb4af0 service nova] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Received unexpected event network-vif-plugged-a6abc627-04a4-4166-b281-cdef37bbea65 for instance with vm_state building and task_state spawning. 
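The final resource view and inventory report near the top of this excerpt (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) fix what the scheduler can place on this node. A minimal sketch of the capacity arithmetic, assuming placement's usual rule of capacity = (total - reserved) * allocation_ratio:

    # Capacity implied by the inventory logged above, assuming the placement
    # rule: capacity = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0; the 6 vCPUs already
    # allocated in the final resource view are a small fraction of the 192
    # schedulable ones.

Note that max_unit still caps any single allocation (16 VCPU, 173 DISK_GB here) regardless of how much aggregate capacity remains.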
[ 1252.894526] env[61215]: DEBUG nova.compute.manager [req-8bf29c3b-44d9-40a2-a13f-c1fd21963497 req-5195ea4b-ee22-42bd-a6fb-03148e754cd0 service nova] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Received event network-vif-plugged-94e9036f-7fb6-4e92-a896-364ddcc37e55 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1252.894933] env[61215]: DEBUG oslo_concurrency.lockutils [req-8bf29c3b-44d9-40a2-a13f-c1fd21963497 req-5195ea4b-ee22-42bd-a6fb-03148e754cd0 service nova] Acquiring lock "2a605c78-bef9-44f0-bcd0-e5bd3e1b0028-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1252.895214] env[61215]: DEBUG oslo_concurrency.lockutils [req-8bf29c3b-44d9-40a2-a13f-c1fd21963497 req-5195ea4b-ee22-42bd-a6fb-03148e754cd0 service nova] Lock "2a605c78-bef9-44f0-bcd0-e5bd3e1b0028-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1252.895395] env[61215]: DEBUG oslo_concurrency.lockutils [req-8bf29c3b-44d9-40a2-a13f-c1fd21963497 req-5195ea4b-ee22-42bd-a6fb-03148e754cd0 service nova] Lock "2a605c78-bef9-44f0-bcd0-e5bd3e1b0028-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1252.895774] env[61215]: DEBUG nova.compute.manager [req-8bf29c3b-44d9-40a2-a13f-c1fd21963497 req-5195ea4b-ee22-42bd-a6fb-03148e754cd0 service nova] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] No waiting events found dispatching network-vif-plugged-94e9036f-7fb6-4e92-a896-364ddcc37e55 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1252.896058] env[61215]: WARNING nova.compute.manager [req-8bf29c3b-44d9-40a2-a13f-c1fd21963497 req-5195ea4b-ee22-42bd-a6fb-03148e754cd0 service nova] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Received unexpected event network-vif-plugged-94e9036f-7fb6-4e92-a896-364ddcc37e55 for instance with vm_state building and task_state spawning. 
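The two WARNINGs above ("Received unexpected event network-vif-plugged-... for instance with vm_state building and task_state spawning") are expected at this point in a boot: Neutron can deliver the vif-plugged notification before the virt driver has registered a waiter for it, so pop_instance_event finds nothing to signal and the event is dropped. A simplified, single-process sketch of that waiter pattern (not Nova's actual InstanceEvents implementation):

    import threading

    class InstanceEvents:
        """Toy model of the register-then-signal pattern seen in the log."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, uuid, name):
            # Called by spawn before it blocks waiting for the event.
            ev = threading.Event()
            with self._lock:
                self._waiters[(uuid, name)] = ev
            return ev

        def pop_instance_event(self, uuid, name):
            # Called from the external-event handler; the real code does this
            # under the "<uuid>-events" lock shown in the log.
            with self._lock:
                ev = self._waiters.pop((uuid, name), None)
            if ev is None:
                print('No waiting events found dispatching', name)  # WARNING path
                return False
            ev.set()  # wakes the waiter in spawn
            return True

    events = InstanceEvents()
    # Event arrives before anyone registered a waiter -> the WARNING path:
    events.pop_instance_event('1eaf05ba-8235-4a68-b807-db95e65c0933',
                              'network-vif-plugged-a6abc627-04a4-4166-b281-cdef37bbea65')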
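The Acquiring / acquired / released triplets throughout these entries come from oslo.concurrency's named-lock helpers; the lock name encodes the guarded resource, e.g. "refresh_cache-<instance-uuid>" around network-cache rebuilds and "<instance-uuid>-events" around event bookkeeping. A sketch of the pattern (lockutils.lock is the real oslo.concurrency context manager; the body is a stand-in):

    from oslo_concurrency import lockutils

    def rebuild_network_info_cache():
        pass  # stand-in for the work done while the lock is held

    # lockutils itself emits the "waited N s" / "held N s" lines seen above,
    # around entry to and exit from this context manager.
    with lockutils.lock('refresh_cache-1eaf05ba-8235-4a68-b807-db95e65c0933'):
        rebuild_network_info_cache()

The decorator form, lockutils.synchronized('compute_resources'), wraps whole methods the same way, which is how the earlier 'Lock "compute_resources" ... held 0.335s' line around ResourceTracker._update_available_resource is produced.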
[ 1253.240667] env[61215]: DEBUG nova.network.neutron [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Updating instance_info_cache with network_info: [{"id": "a6abc627-04a4-4166-b281-cdef37bbea65", "address": "fa:16:3e:f8:fa:f6", "network": {"id": "98f44a92-d079-4367-854c-fcb97b85f66e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1648648018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a867bdfa83f9414b9e0b83b27a34b51c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6abc627-04", "ovs_interfaceid": "a6abc627-04a4-4166-b281-cdef37bbea65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.260870] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Releasing lock "refresh_cache-1eaf05ba-8235-4a68-b807-db95e65c0933" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1253.260870] env[61215]: DEBUG nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Instance network_info: |[{"id": "a6abc627-04a4-4166-b281-cdef37bbea65", "address": "fa:16:3e:f8:fa:f6", "network": {"id": "98f44a92-d079-4367-854c-fcb97b85f66e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1648648018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a867bdfa83f9414b9e0b83b27a34b51c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6abc627-04", "ovs_interfaceid": "a6abc627-04a4-4166-b281-cdef37bbea65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1253.261715] env[61215]: DEBUG 
nova.virt.vmwareapi.vmops [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:fa:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea4fe416-47a6-4542-b59d-8c71ab4d6503', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a6abc627-04a4-4166-b281-cdef37bbea65', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1253.279030] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1253.279758] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a520b380-8349-42c7-8f2b-2fa32741d7fc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.294340] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Created folder: OpenStack in parent group-v4. [ 1253.294416] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Creating folder: Project (a867bdfa83f9414b9e0b83b27a34b51c). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1253.294722] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0d22efe-c52f-4eb6-87e5-ff386c3fdb71 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.306353] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Created folder: Project (a867bdfa83f9414b9e0b83b27a34b51c) in parent group-v352463. [ 1253.306586] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Creating folder: Instances. Parent ref: group-v352464. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1253.306821] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a91772b-0d24-4ec5-b3f8-8f25effcdeab {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.320382] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Created folder: Instances in parent group-v352464. [ 1253.320738] env[61215]: DEBUG oslo.service.loopingcall [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1253.320873] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1253.321182] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-464b6e03-cc7d-44ce-a0bf-8c64e639a20d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.348721] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1253.348721] env[61215]: value = "task-1690257" [ 1253.348721] env[61215]: _type = "Task" [ 1253.348721] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.362054] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690257, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.453135] env[61215]: DEBUG nova.network.neutron [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Successfully updated port: fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1253.474769] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "refresh_cache-d2756d6f-d1f6-4408-83a6-3cbae8bf8b04" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1253.474769] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquired lock "refresh_cache-d2756d6f-d1f6-4408-83a6-3cbae8bf8b04" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.474769] env[61215]: DEBUG nova.network.neutron [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1253.640954] env[61215]: DEBUG nova.network.neutron [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1253.664321] env[61215]: DEBUG nova.network.neutron [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Updating instance_info_cache with network_info: [{"id": "94e9036f-7fb6-4e92-a896-364ddcc37e55", "address": "fa:16:3e:f0:06:b5", "network": {"id": "1c6debea-9c5f-466e-b534-edd5f056858c", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1979252212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68fd74a5960e4273bc406809c227d1a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94e9036f-7f", "ovs_interfaceid": "94e9036f-7fb6-4e92-a896-364ddcc37e55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.675094] env[61215]: DEBUG nova.network.neutron [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Successfully created port: 5d3e9a06-08f1-40ee-9d62-5fdccb302916 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1253.682615] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Releasing lock "refresh_cache-2a605c78-bef9-44f0-bcd0-e5bd3e1b0028" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1253.682615] env[61215]: DEBUG nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Instance network_info: |[{"id": "94e9036f-7fb6-4e92-a896-364ddcc37e55", "address": "fa:16:3e:f0:06:b5", "network": {"id": "1c6debea-9c5f-466e-b534-edd5f056858c", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1979252212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68fd74a5960e4273bc406809c227d1a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", 
"external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94e9036f-7f", "ovs_interfaceid": "94e9036f-7fb6-4e92-a896-364ddcc37e55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1253.682940] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:06:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eaba65c3-6925-4c7f-83b6-17cd1a328e27', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94e9036f-7fb6-4e92-a896-364ddcc37e55', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1253.690652] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Creating folder: Project (68fd74a5960e4273bc406809c227d1a8). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1253.691396] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b855ace8-139c-485c-8bec-a4b7ea16d7fa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.704408] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Created folder: Project (68fd74a5960e4273bc406809c227d1a8) in parent group-v352463. [ 1253.704675] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Creating folder: Instances. Parent ref: group-v352467. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1253.706470] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de45b509-02bf-4ae1-8056-0bd319ec80de {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.717432] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Created folder: Instances in parent group-v352467. [ 1253.717740] env[61215]: DEBUG oslo.service.loopingcall [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1253.717970] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1253.718228] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-baf2c5dc-3870-40fb-823f-51d867271176 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.749060] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1253.749060] env[61215]: value = "task-1690260" [ 1253.749060] env[61215]: _type = "Task" [ 1253.749060] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.760083] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690260, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.860859] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690257, 'name': CreateVM_Task} progress is 99%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.022181] env[61215]: DEBUG nova.network.neutron [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Successfully updated port: 65b64270-4c6d-478e-bd7c-0fed0f33fc2f {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1254.045294] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Acquiring lock "refresh_cache-351c2ada-945a-4f0b-8fa9-47e3412c5e05" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.045294] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Acquired lock "refresh_cache-351c2ada-945a-4f0b-8fa9-47e3412c5e05" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.045294] env[61215]: DEBUG nova.network.neutron [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1254.243832] env[61215]: DEBUG nova.network.neutron [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1254.264654] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690260, 'name': CreateVM_Task, 'duration_secs': 0.400127} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.264841] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1254.299823] env[61215]: DEBUG oslo_vmware.service [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a89bb63-d2ab-44ab-a290-ed3957d1ef18 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.309204] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.309362] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.310094] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1254.310378] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c0025c2-aa49-47a9-9215-a332d00dad7b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.316361] env[61215]: DEBUG oslo_vmware.api [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Waiting for the task: (returnval){ [ 1254.316361] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]526259d3-685c-d69c-eede-53779de7fbde" [ 1254.316361] env[61215]: _type = "Task" [ 1254.316361] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.328343] env[61215]: DEBUG oslo_vmware.api [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]526259d3-685c-d69c-eede-53779de7fbde, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.367843] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690257, 'name': CreateVM_Task, 'duration_secs': 0.590269} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.368066] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1254.368750] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.398860] env[61215]: DEBUG nova.network.neutron [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Successfully updated port: 81e39a76-5d88-49b2-8442-77cf90acfb2f {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1254.404019] env[61215]: DEBUG nova.network.neutron [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Updating instance_info_cache with network_info: [{"id": "fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81", "address": "fa:16:3e:9e:b4:1c", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd9e8bcc-f8", "ovs_interfaceid": "fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.419521] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Acquiring lock "refresh_cache-805748d7-e459-4608-a02d-05ac56c48290" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.419680] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Acquired lock "refresh_cache-805748d7-e459-4608-a02d-05ac56c48290" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.419834] env[61215]: DEBUG nova.network.neutron [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 
tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1254.428146] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Releasing lock "refresh_cache-d2756d6f-d1f6-4408-83a6-3cbae8bf8b04" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1254.429595] env[61215]: DEBUG nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Instance network_info: |[{"id": "fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81", "address": "fa:16:3e:9e:b4:1c", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd9e8bcc-f8", "ovs_interfaceid": "fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1254.429752] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:b4:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1254.440828] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Creating folder: Project (69b85dab58ff4d1780ed5ec65c1ff99c). Parent ref: group-v352463. 
{{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1254.442154] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c337f55-578a-4cfa-b16b-c5327483a95e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.456397] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Created folder: Project (69b85dab58ff4d1780ed5ec65c1ff99c) in parent group-v352463. [ 1254.456397] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Creating folder: Instances. Parent ref: group-v352470. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1254.456614] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5d0e5c9-14ab-44f6-bcb5-9a486b8ee7a9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.467867] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Created folder: Instances in parent group-v352470. [ 1254.468139] env[61215]: DEBUG oslo.service.loopingcall [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1254.468332] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1254.468612] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65a9618c-a5f9-433a-ada9-2b303f768d2c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.495940] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1254.495940] env[61215]: value = "task-1690263" [ 1254.495940] env[61215]: _type = "Task" [ 1254.495940] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.507442] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690263, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.830110] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1254.830365] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1254.830825] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.830825] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.831290] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1254.831849] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.832502] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1254.832502] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3ddaf2d-35bd-4bb4-aee4-f2e7f2e60d18 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.834524] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34e20405-358e-4528-9cde-259ef16050ae {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.842021] env[61215]: DEBUG 
oslo_vmware.api [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Waiting for the task: (returnval){ [ 1254.842021] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52a1dbfa-6e5d-a266-b916-25d8330c3e38" [ 1254.842021] env[61215]: _type = "Task" [ 1254.842021] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.845703] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1254.846105] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1254.847233] env[61215]: DEBUG nova.network.neutron [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1254.850257] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c37a57-a3dc-41d4-ad89-15b8f96de3cd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.860498] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1254.860892] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1254.861896] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1254.864986] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3678456c-4b6e-4e31-be2a-160f1ec43446 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.870727] env[61215]: DEBUG oslo_vmware.api [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 
tempest-AttachInterfacesV270Test-535805287-project-member] Waiting for the task: (returnval){ [ 1254.870727] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b9f098-0472-2bdb-caf6-64c735609cd3" [ 1254.870727] env[61215]: _type = "Task" [ 1254.870727] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.879769] env[61215]: DEBUG oslo_vmware.api [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b9f098-0472-2bdb-caf6-64c735609cd3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.015177] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690263, 'name': CreateVM_Task, 'duration_secs': 0.362945} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.015372] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1255.016129] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1255.016326] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.016660] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1255.016944] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dae74b1-4365-469b-b2d4-48c62fb08020 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.026455] env[61215]: DEBUG oslo_vmware.api [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Waiting for the task: (returnval){ [ 1255.026455] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5217ef8b-dd7f-b246-286f-004b7632a6ab" [ 1255.026455] env[61215]: _type = "Task" [ 1255.026455] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.037968] env[61215]: DEBUG oslo_vmware.api [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5217ef8b-dd7f-b246-286f-004b7632a6ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.308192] env[61215]: DEBUG nova.network.neutron [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Updating instance_info_cache with network_info: [{"id": "65b64270-4c6d-478e-bd7c-0fed0f33fc2f", "address": "fa:16:3e:05:4c:9a", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.87", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65b64270-4c", "ovs_interfaceid": "65b64270-4c6d-478e-bd7c-0fed0f33fc2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.322630] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Releasing lock "refresh_cache-351c2ada-945a-4f0b-8fa9-47e3412c5e05" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1255.323276] env[61215]: DEBUG nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Instance network_info: |[{"id": "65b64270-4c6d-478e-bd7c-0fed0f33fc2f", "address": "fa:16:3e:05:4c:9a", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.87", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": 
"nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65b64270-4c", "ovs_interfaceid": "65b64270-4c6d-478e-bd7c-0fed0f33fc2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1255.324604] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:4c:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '65b64270-4c6d-478e-bd7c-0fed0f33fc2f', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1255.332290] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Creating folder: Project (2ae8261b5f16427d8c5dd6e205f5b317). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1255.333474] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f51b9202-0d2e-49e0-aeda-0c0649ed2b03 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.344779] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Created folder: Project (2ae8261b5f16427d8c5dd6e205f5b317) in parent group-v352463. [ 1255.345061] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Creating folder: Instances. Parent ref: group-v352473. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1255.345323] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1133c3f-6485-4873-94ab-d40f4abcee27 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.361749] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Created folder: Instances in parent group-v352473. [ 1255.361749] env[61215]: DEBUG oslo.service.loopingcall [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1255.361749] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1255.362800] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2709f39f-3990-49b7-8de7-6d4902a31eca {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.391011] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1255.391011] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Creating directory with path [datastore1] vmware_temp/95b31884-c35e-4d3c-a7a8-61f3907c5c61/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1255.391240] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1255.391240] env[61215]: value = "task-1690266" [ 1255.391240] env[61215]: _type = "Task" [ 1255.391240] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.391415] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe3d515c-7cd8-433f-b345-54f6ec9f3709 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.402039] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690266, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.424108] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Created directory with path [datastore1] vmware_temp/95b31884-c35e-4d3c-a7a8-61f3907c5c61/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1255.424339] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Fetch image to [datastore1] vmware_temp/95b31884-c35e-4d3c-a7a8-61f3907c5c61/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1255.424513] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/95b31884-c35e-4d3c-a7a8-61f3907c5c61/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1255.425368] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f70d7e-b256-4b74-8ca8-6c0dffe7eb23 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.435486] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b46634-be7b-461c-bcfb-8e842333c0b8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.449187] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d994d92-15a7-4968-b068-99a297e82811 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.486318] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324c5c56-2224-4917-9694-861f73757848 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.494696] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6696d183-a0b2-4231-815d-3caa336e199b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.537432] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1255.537715] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 
d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1255.537997] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1255.578537] env[61215]: DEBUG nova.network.neutron [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Updating instance_info_cache with network_info: [{"id": "81e39a76-5d88-49b2-8442-77cf90acfb2f", "address": "fa:16:3e:4a:91:fe", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81e39a76-5d", "ovs_interfaceid": "81e39a76-5d88-49b2-8442-77cf90acfb2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.584261] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1255.605910] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Releasing lock "refresh_cache-805748d7-e459-4608-a02d-05ac56c48290" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1255.605910] env[61215]: DEBUG nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Instance network_info: |[{"id": "81e39a76-5d88-49b2-8442-77cf90acfb2f", "address": "fa:16:3e:4a:91:fe", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81e39a76-5d", "ovs_interfaceid": "81e39a76-5d88-49b2-8442-77cf90acfb2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1255.608813] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:91:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81e39a76-5d88-49b2-8442-77cf90acfb2f', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1255.619242] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Creating folder: Project (53fec98c0f69472497a1245f69c6b238). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1255.623309] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9478a4b6-42c7-4b58-b71a-6e04a796bad7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.634467] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Created folder: Project (53fec98c0f69472497a1245f69c6b238) in parent group-v352463. [ 1255.634773] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Creating folder: Instances. Parent ref: group-v352476. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1255.635870] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9128c69f-0c37-4566-8d0b-f9d1557b5ce4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.654249] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Created folder: Instances in parent group-v352476. 
[ 1255.656783] env[61215]: DEBUG oslo.service.loopingcall [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1255.656783] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1255.656783] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc7d8669-d934-4dca-a459-2129053c2760 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.681604] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1255.681604] env[61215]: value = "task-1690269" [ 1255.681604] env[61215]: _type = "Task" [ 1255.681604] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.690647] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690269, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.692914] env[61215]: DEBUG nova.compute.manager [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Received event network-changed-a6abc627-04a4-4166-b281-cdef37bbea65 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1255.693120] env[61215]: DEBUG nova.compute.manager [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Refreshing instance network info cache due to event network-changed-a6abc627-04a4-4166-b281-cdef37bbea65. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1255.693349] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Acquiring lock "refresh_cache-1eaf05ba-8235-4a68-b807-db95e65c0933" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1255.693501] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Acquired lock "refresh_cache-1eaf05ba-8235-4a68-b807-db95e65c0933" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.693665] env[61215]: DEBUG nova.network.neutron [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Refreshing network info cache for port a6abc627-04a4-4166-b281-cdef37bbea65 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1255.711749] env[61215]: DEBUG oslo_vmware.rw_handles [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/95b31884-c35e-4d3c-a7a8-61f3907c5c61/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1255.781951] env[61215]: DEBUG nova.compute.manager [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Received event network-changed-94e9036f-7fb6-4e92-a896-364ddcc37e55 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1255.782263] env[61215]: DEBUG nova.compute.manager [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Refreshing instance network info cache due to event network-changed-94e9036f-7fb6-4e92-a896-364ddcc37e55. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1255.782479] env[61215]: DEBUG oslo_concurrency.lockutils [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] Acquiring lock "refresh_cache-2a605c78-bef9-44f0-bcd0-e5bd3e1b0028" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1255.782617] env[61215]: DEBUG oslo_concurrency.lockutils [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] Acquired lock "refresh_cache-2a605c78-bef9-44f0-bcd0-e5bd3e1b0028" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.782818] env[61215]: DEBUG nova.network.neutron [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Refreshing network info cache for port 94e9036f-7fb6-4e92-a896-364ddcc37e55 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1255.786314] env[61215]: DEBUG oslo_vmware.rw_handles [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1255.786494] env[61215]: DEBUG oslo_vmware.rw_handles [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/95b31884-c35e-4d3c-a7a8-61f3907c5c61/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1255.905301] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690266, 'name': CreateVM_Task, 'duration_secs': 0.345014} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.905591] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1255.906190] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1255.908900] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.908900] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1255.908900] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-320f17bb-410d-41cf-9a8f-663f335097fe {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.914286] env[61215]: DEBUG oslo_vmware.api [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Waiting for the task: (returnval){ [ 1255.914286] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52db557d-5fd5-2f06-3932-3f51f1e9f998" [ 1255.914286] env[61215]: _type = "Task" [ 1255.914286] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.924481] env[61215]: DEBUG oslo_vmware.api [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52db557d-5fd5-2f06-3932-3f51f1e9f998, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.192683] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690269, 'name': CreateVM_Task, 'duration_secs': 0.378248} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.192870] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1256.194027] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.421663] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1256.422220] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1256.422297] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.422482] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.422825] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1256.423115] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69d71ef6-0266-4d6e-8717-a1fa9bde20ea {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.428060] env[61215]: DEBUG oslo_vmware.api [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Waiting for the task: (returnval){ [ 1256.428060] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]529769fe-d108-90c7-90f9-eec64c602ac5" [ 1256.428060] env[61215]: _type = "Task" [ 1256.428060] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.436969] env[61215]: DEBUG oslo_vmware.api [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]529769fe-d108-90c7-90f9-eec64c602ac5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.933445] env[61215]: DEBUG nova.network.neutron [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Updated VIF entry in instance network info cache for port 94e9036f-7fb6-4e92-a896-364ddcc37e55. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1256.933783] env[61215]: DEBUG nova.network.neutron [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Updating instance_info_cache with network_info: [{"id": "94e9036f-7fb6-4e92-a896-364ddcc37e55", "address": "fa:16:3e:f0:06:b5", "network": {"id": "1c6debea-9c5f-466e-b534-edd5f056858c", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1979252212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68fd74a5960e4273bc406809c227d1a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eaba65c3-6925-4c7f-83b6-17cd1a328e27", "external-id": "nsx-vlan-transportzone-202", "segmentation_id": 202, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94e9036f-7f", "ovs_interfaceid": "94e9036f-7fb6-4e92-a896-364ddcc37e55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.941554] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1256.941834] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1256.942059] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.949336] env[61215]: DEBUG oslo_concurrency.lockutils [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] Releasing lock "refresh_cache-2a605c78-bef9-44f0-bcd0-e5bd3e1b0028" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1256.949697] env[61215]: DEBUG nova.compute.manager [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Received event network-vif-plugged-65b64270-4c6d-478e-bd7c-0fed0f33fc2f {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1256.949882] env[61215]: DEBUG oslo_concurrency.lockutils [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] Acquiring lock "351c2ada-945a-4f0b-8fa9-47e3412c5e05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1256.950114] env[61215]: DEBUG oslo_concurrency.lockutils [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] Lock "351c2ada-945a-4f0b-8fa9-47e3412c5e05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1256.950280] env[61215]: DEBUG oslo_concurrency.lockutils [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] Lock "351c2ada-945a-4f0b-8fa9-47e3412c5e05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1256.950596] env[61215]: DEBUG nova.compute.manager [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] No waiting events found dispatching network-vif-plugged-65b64270-4c6d-478e-bd7c-0fed0f33fc2f {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1256.950718] env[61215]: WARNING nova.compute.manager [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Received unexpected event network-vif-plugged-65b64270-4c6d-478e-bd7c-0fed0f33fc2f for instance with vm_state building and task_state spawning. [ 1256.950987] env[61215]: DEBUG nova.compute.manager [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Received event network-changed-65b64270-4c6d-478e-bd7c-0fed0f33fc2f {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1256.951402] env[61215]: DEBUG nova.compute.manager [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Refreshing instance network info cache due to event network-changed-65b64270-4c6d-478e-bd7c-0fed0f33fc2f. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1256.951630] env[61215]: DEBUG oslo_concurrency.lockutils [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] Acquiring lock "refresh_cache-351c2ada-945a-4f0b-8fa9-47e3412c5e05" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1256.951782] env[61215]: DEBUG oslo_concurrency.lockutils [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] Acquired lock "refresh_cache-351c2ada-945a-4f0b-8fa9-47e3412c5e05" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.951936] env[61215]: DEBUG nova.network.neutron [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Refreshing network info cache for port 65b64270-4c6d-478e-bd7c-0fed0f33fc2f {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1257.145133] env[61215]: DEBUG nova.network.neutron [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Successfully updated port: 5d3e9a06-08f1-40ee-9d62-5fdccb302916 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1257.164312] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Acquiring lock "refresh_cache-ef0f6995-b272-4a45-a09d-5d8d38ffe23c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.164484] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Acquired lock "refresh_cache-ef0f6995-b272-4a45-a09d-5d8d38ffe23c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.164661] env[61215]: DEBUG nova.network.neutron [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1257.342870] env[61215]: DEBUG nova.network.neutron [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1257.516202] env[61215]: DEBUG nova.network.neutron [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Updated VIF entry in instance network info cache for port a6abc627-04a4-4166-b281-cdef37bbea65. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1257.516605] env[61215]: DEBUG nova.network.neutron [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Updating instance_info_cache with network_info: [{"id": "a6abc627-04a4-4166-b281-cdef37bbea65", "address": "fa:16:3e:f8:fa:f6", "network": {"id": "98f44a92-d079-4367-854c-fcb97b85f66e", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1648648018-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a867bdfa83f9414b9e0b83b27a34b51c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea4fe416-47a6-4542-b59d-8c71ab4d6503", "external-id": "nsx-vlan-transportzone-369", "segmentation_id": 369, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa6abc627-04", "ovs_interfaceid": "a6abc627-04a4-4166-b281-cdef37bbea65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.531047] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Releasing lock "refresh_cache-1eaf05ba-8235-4a68-b807-db95e65c0933" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1257.531322] env[61215]: DEBUG nova.compute.manager [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Received event network-vif-plugged-fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1257.531518] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Acquiring lock "d2756d6f-d1f6-4408-83a6-3cbae8bf8b04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1257.532723] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Lock "d2756d6f-d1f6-4408-83a6-3cbae8bf8b04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1257.532723] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Lock "d2756d6f-d1f6-4408-83a6-3cbae8bf8b04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1257.532723] env[61215]: DEBUG 
nova.compute.manager [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] No waiting events found dispatching network-vif-plugged-fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1257.532723] env[61215]: WARNING nova.compute.manager [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Received unexpected event network-vif-plugged-fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81 for instance with vm_state building and task_state spawning. [ 1257.532971] env[61215]: DEBUG nova.compute.manager [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Received event network-changed-fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1257.532971] env[61215]: DEBUG nova.compute.manager [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Refreshing instance network info cache due to event network-changed-fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1257.532971] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Acquiring lock "refresh_cache-d2756d6f-d1f6-4408-83a6-3cbae8bf8b04" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1257.532971] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Acquired lock "refresh_cache-d2756d6f-d1f6-4408-83a6-3cbae8bf8b04" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1257.535078] env[61215]: DEBUG nova.network.neutron [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Refreshing network info cache for port fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1258.907689] env[61215]: DEBUG nova.compute.manager [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Received event network-vif-plugged-5d3e9a06-08f1-40ee-9d62-5fdccb302916 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1258.907936] env[61215]: DEBUG oslo_concurrency.lockutils [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] Acquiring lock "ef0f6995-b272-4a45-a09d-5d8d38ffe23c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1258.908217] env[61215]: DEBUG oslo_concurrency.lockutils [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] Lock "ef0f6995-b272-4a45-a09d-5d8d38ffe23c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1258.908383] env[61215]: DEBUG oslo_concurrency.lockutils [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] Lock "ef0f6995-b272-4a45-a09d-5d8d38ffe23c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1258.908535] env[61215]: DEBUG nova.compute.manager [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] No waiting events found dispatching network-vif-plugged-5d3e9a06-08f1-40ee-9d62-5fdccb302916 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1258.909918] env[61215]: WARNING nova.compute.manager [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Received unexpected event network-vif-plugged-5d3e9a06-08f1-40ee-9d62-5fdccb302916 for instance with vm_state building and task_state spawning. [ 1258.909918] env[61215]: DEBUG nova.compute.manager [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Received event network-changed-5d3e9a06-08f1-40ee-9d62-5fdccb302916 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1258.909918] env[61215]: DEBUG nova.compute.manager [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Refreshing instance network info cache due to event network-changed-5d3e9a06-08f1-40ee-9d62-5fdccb302916. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1258.909918] env[61215]: DEBUG oslo_concurrency.lockutils [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] Acquiring lock "refresh_cache-ef0f6995-b272-4a45-a09d-5d8d38ffe23c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1258.969230] env[61215]: DEBUG nova.network.neutron [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Updating instance_info_cache with network_info: [{"id": "5d3e9a06-08f1-40ee-9d62-5fdccb302916", "address": "fa:16:3e:b6:31:36", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.103", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d3e9a06-08", "ovs_interfaceid": "5d3e9a06-08f1-40ee-9d62-5fdccb302916", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.990945] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Releasing lock "refresh_cache-ef0f6995-b272-4a45-a09d-5d8d38ffe23c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1258.990945] env[61215]: DEBUG nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Instance network_info: |[{"id": "5d3e9a06-08f1-40ee-9d62-5fdccb302916", "address": "fa:16:3e:b6:31:36", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.103", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d3e9a06-08", "ovs_interfaceid": 
"5d3e9a06-08f1-40ee-9d62-5fdccb302916", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1258.991481] env[61215]: DEBUG oslo_concurrency.lockutils [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] Acquired lock "refresh_cache-ef0f6995-b272-4a45-a09d-5d8d38ffe23c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.991682] env[61215]: DEBUG nova.network.neutron [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Refreshing network info cache for port 5d3e9a06-08f1-40ee-9d62-5fdccb302916 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1258.993187] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:31:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d3e9a06-08f1-40ee-9d62-5fdccb302916', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1259.004196] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Creating folder: Project (074004f542ab46afb2abc965bdc00892). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1259.005567] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78f34b6c-9f18-4f49-b520-871d3f08408e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.022720] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Created folder: Project (074004f542ab46afb2abc965bdc00892) in parent group-v352463. [ 1259.022974] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Creating folder: Instances. Parent ref: group-v352479. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1259.023778] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9241537b-53e7-4f7a-a20e-06f77db65f5b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.037388] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Created folder: Instances in parent group-v352479. 
[ 1259.037640] env[61215]: DEBUG oslo.service.loopingcall [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1259.038162] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1259.038452] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd82ff76-32bd-46ba-a22e-ae2d2feb7fca {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.063163] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1259.063163] env[61215]: value = "task-1690272" [ 1259.063163] env[61215]: _type = "Task" [ 1259.063163] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.073440] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690272, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.102209] env[61215]: DEBUG nova.network.neutron [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Updated VIF entry in instance network info cache for port fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1259.102319] env[61215]: DEBUG nova.network.neutron [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Updating instance_info_cache with network_info: [{"id": "fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81", "address": "fa:16:3e:9e:b4:1c", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd9e8bcc-f8", "ovs_interfaceid": "fd9e8bcc-f8fe-4b5a-971a-3e78b28fcb81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.124803] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Releasing lock "refresh_cache-d2756d6f-d1f6-4408-83a6-3cbae8bf8b04" {{(pid=61215) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1259.125167] env[61215]: DEBUG nova.compute.manager [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Received event network-vif-plugged-81e39a76-5d88-49b2-8442-77cf90acfb2f {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1259.125388] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Acquiring lock "805748d7-e459-4608-a02d-05ac56c48290-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1259.125616] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Lock "805748d7-e459-4608-a02d-05ac56c48290-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1259.125808] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Lock "805748d7-e459-4608-a02d-05ac56c48290-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1259.126056] env[61215]: DEBUG nova.compute.manager [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: 805748d7-e459-4608-a02d-05ac56c48290] No waiting events found dispatching network-vif-plugged-81e39a76-5d88-49b2-8442-77cf90acfb2f {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1259.126157] env[61215]: WARNING nova.compute.manager [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Received unexpected event network-vif-plugged-81e39a76-5d88-49b2-8442-77cf90acfb2f for instance with vm_state building and task_state spawning. [ 1259.126336] env[61215]: DEBUG nova.compute.manager [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Received event network-changed-81e39a76-5d88-49b2-8442-77cf90acfb2f {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1259.126499] env[61215]: DEBUG nova.compute.manager [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Refreshing instance network info cache due to event network-changed-81e39a76-5d88-49b2-8442-77cf90acfb2f. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1259.126689] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Acquiring lock "refresh_cache-805748d7-e459-4608-a02d-05ac56c48290" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1259.126936] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Acquired lock "refresh_cache-805748d7-e459-4608-a02d-05ac56c48290" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.127099] env[61215]: DEBUG nova.network.neutron [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Refreshing network info cache for port 81e39a76-5d88-49b2-8442-77cf90acfb2f {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1259.394755] env[61215]: DEBUG nova.network.neutron [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Updated VIF entry in instance network info cache for port 65b64270-4c6d-478e-bd7c-0fed0f33fc2f. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1259.395182] env[61215]: DEBUG nova.network.neutron [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Updating instance_info_cache with network_info: [{"id": "65b64270-4c6d-478e-bd7c-0fed0f33fc2f", "address": "fa:16:3e:05:4c:9a", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.87", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65b64270-4c", "ovs_interfaceid": "65b64270-4c6d-478e-bd7c-0fed0f33fc2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.406738] env[61215]: DEBUG oslo_concurrency.lockutils [req-bf8ec31d-8f6f-4efd-985b-f798ae653a44 req-90b87780-643e-43e2-b91d-00d6894bb0d1 service nova] Releasing lock "refresh_cache-351c2ada-945a-4f0b-8fa9-47e3412c5e05" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1259.580572] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690272, 'name': CreateVM_Task, 'duration_secs': 0.388687} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.582418] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1259.583210] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1259.583782] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.584386] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1259.584660] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58a9ac19-d017-4653-b869-1a60ec89f3dc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.593204] env[61215]: DEBUG oslo_vmware.api [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Waiting for the task: (returnval){ [ 1259.593204] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52dad4cf-7ac0-72cf-41af-6c3f00b3f35f" [ 1259.593204] env[61215]: _type = "Task" [ 1259.593204] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.606966] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1259.607307] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1259.607532] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1259.983548] env[61215]: DEBUG nova.network.neutron [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Updated VIF entry in instance network info cache for port 81e39a76-5d88-49b2-8442-77cf90acfb2f. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1259.983548] env[61215]: DEBUG nova.network.neutron [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Updating instance_info_cache with network_info: [{"id": "81e39a76-5d88-49b2-8442-77cf90acfb2f", "address": "fa:16:3e:4a:91:fe", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81e39a76-5d", "ovs_interfaceid": "81e39a76-5d88-49b2-8442-77cf90acfb2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1260.002504] env[61215]: DEBUG oslo_concurrency.lockutils [req-b907c76e-6505-4e70-b7dd-b5f826e7bde9 req-3792d96d-9044-4689-8009-30460cd71cab service nova] Releasing lock "refresh_cache-805748d7-e459-4608-a02d-05ac56c48290" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1260.175530] 
env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Acquiring lock "67068a42-eba7-4529-9ebf-43d6865362b1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.175995] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Lock "67068a42-eba7-4529-9ebf-43d6865362b1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.197443] env[61215]: DEBUG nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1260.289875] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.296646] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.005s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1260.296646] env[61215]: INFO nova.compute.claims [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1260.572556] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c33aac-af07-4434-a2ef-c413e5d0abaa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.585019] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-addad251-c547-4bb3-9c11-e380bf98d841 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.622125] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d9d9b5-723d-4da7-a275-cda85030afbd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.630686] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2413dba6-ffb0-477c-952a-1ed3208e7153 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.648178] env[61215]: DEBUG nova.compute.provider_tree [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1260.660259] env[61215]: DEBUG nova.scheduler.client.report [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1260.683635] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.389s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1260.684139] env[61215]: DEBUG nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1260.736695] env[61215]: DEBUG nova.compute.utils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1260.738103] env[61215]: DEBUG nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1260.738272] env[61215]: DEBUG nova.network.neutron [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1260.751029] env[61215]: DEBUG nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Start building block device mappings for instance. 
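The report-client record above dumps the provider inventory verbatim. Usable capacity per resource class follows placement's usual rule, (total - reserved) * allocation_ratio, with max_unit capping any single allocation; checking the logged numbers:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "max_unit": 16,    "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "max_unit": 173,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: schedulable={capacity:.0f}, per-allocation cap={inv['max_unit']}")
    # VCPU: schedulable=192, per-allocation cap=16
    # MEMORY_MB: schedulable=196078, per-allocation cap=65530
    # DISK_GB: schedulable=400, per-allocation cap=173

A single m1.nano claim (1 VCPU, 128 MB, 1 GB root disk) fits comfortably inside those numbers, which is why every claim in this log succeeds immediately.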
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1260.825387] env[61215]: DEBUG nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1260.860942] env[61215]: DEBUG nova.virt.hardware [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1260.862229] env[61215]: DEBUG nova.virt.hardware [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1260.862229] env[61215]: DEBUG nova.virt.hardware [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1260.862229] env[61215]: DEBUG nova.virt.hardware [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1260.862229] env[61215]: DEBUG nova.virt.hardware [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1260.862229] env[61215]: DEBUG nova.virt.hardware [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1260.862509] env[61215]: DEBUG nova.virt.hardware [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1260.864051] env[61215]: DEBUG nova.virt.hardware [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1260.864112] env[61215]: DEBUG nova.virt.hardware [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1260.866686] env[61215]: DEBUG nova.virt.hardware [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1260.866686] env[61215]: DEBUG nova.virt.hardware [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1260.866686] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1926723e-91de-4209-9238-36e052c67ea7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.878956] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ec02ef-2e7e-4bdd-872c-fd9a1dfe9283 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.196428] env[61215]: DEBUG nova.policy [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09567ee0b02342dfb2241e8121ea3fe6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c427ab1108404e5aa7ae1659794485a3', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1261.206277] env[61215]: DEBUG nova.network.neutron [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Updated VIF entry in instance network info cache for port 5d3e9a06-08f1-40ee-9d62-5fdccb302916. 
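The hardware.py records above walk a fixed pipeline: merge flavor and image limits and preferences (all 0:0:0 here, i.e. unconstrained), enumerate the topologies that can express the flavor's vCPU count, then sort them by preference. A much-simplified sketch of the enumeration step, not Nova's actual code:

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, maximum):
        # Every sockets*cores*threads factorization of vcpus within the limits.
        return [VirtCPUTopology(s, c, t)
                for s in range(1, min(vcpus, maximum.sockets) + 1)
                for c in range(1, min(vcpus, maximum.cores) + 1)
                for t in range(1, min(vcpus, maximum.threads) + 1)
                if s * c * t == vcpus]

    maximum = VirtCPUTopology(65536, 65536, 65536)
    print(possible_topologies(1, maximum))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]

This matches the single 1:1:1 topology reported above for the 1-vCPU m1.nano flavor.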
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1261.206277] env[61215]: DEBUG nova.network.neutron [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Updating instance_info_cache with network_info: [{"id": "5d3e9a06-08f1-40ee-9d62-5fdccb302916", "address": "fa:16:3e:b6:31:36", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.103", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d3e9a06-08", "ovs_interfaceid": "5d3e9a06-08f1-40ee-9d62-5fdccb302916", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.222705] env[61215]: DEBUG oslo_concurrency.lockutils [req-84759df4-7f1c-4ffd-b1e8-7cf99596e294 req-af0f4476-38f1-468e-bc7e-44962f09b336 service nova] Releasing lock "refresh_cache-ef0f6995-b272-4a45-a09d-5d8d38ffe23c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1263.651151] env[61215]: DEBUG nova.network.neutron [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Successfully created port: 921c2c9e-47cd-443e-ad1f-bfe99e75ac09 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1265.154698] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquiring lock "ad40882f-de01-4bee-81dd-e91d07248d22" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1265.155068] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Lock "ad40882f-de01-4bee-81dd-e91d07248d22" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.172135] env[61215]: DEBUG nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1265.275886] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1265.276152] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.277634] env[61215]: INFO nova.compute.claims [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1265.493279] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95d4876-0592-48c4-b042-5823f6e4dc4a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.501845] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf56d8cf-9c18-4252-b327-36364ad1faf2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.536177] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae697a71-a4b3-4772-aa0b-2b7c89351321 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.544656] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43045c6f-4679-4da6-90e5-7eb416623534 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.559186] env[61215]: DEBUG nova.compute.provider_tree [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1265.569389] env[61215]: DEBUG nova.scheduler.client.report [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1265.594989] env[61215]: DEBUG oslo_concurrency.lockutils 
[None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.318s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1265.595543] env[61215]: DEBUG nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1265.653254] env[61215]: DEBUG nova.compute.utils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1265.655057] env[61215]: DEBUG nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Not allocating networking since 'none' was specified. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1265.670113] env[61215]: DEBUG nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1265.756828] env[61215]: DEBUG nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Start spawning the instance on the hypervisor. 
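Read together, the manager records for each instance trace one fixed sequence: take the per-instance build lock, claim resources under the host-wide compute_resources lock, start network allocation in the background (or skip it when 'none' was requested, as for ad40882f above), prepare block device mappings, then spawn. A skeletal outline of that ordering, with hypothetical callables rather than Nova's real signatures:

    def build_and_run_instance_outline(instance, claim_resources,
                                       allocate_network_async,
                                       build_block_device_mappings, spawn):
        # 1. Resource claim, serialized per host by the "compute_resources" lock.
        claim = claim_resources(instance)
        # 2. Networking kicked off asynchronously ("Allocating IP information
        #    in the background."), or skipped entirely for network 'none'.
        network_future = allocate_network_async(instance)
        # 3. Block device mappings are built while Neutron works.
        bdms = build_block_device_mappings(instance)
        # 4. Spawn on the hypervisor with whatever networking resolves to.
        return spawn(instance, network_future, bdms, claim)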
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1265.789577] env[61215]: DEBUG nova.virt.hardware [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1265.789910] env[61215]: DEBUG nova.virt.hardware [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1265.790137] env[61215]: DEBUG nova.virt.hardware [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1265.790677] env[61215]: DEBUG nova.virt.hardware [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1265.790677] env[61215]: DEBUG nova.virt.hardware [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1265.790677] env[61215]: DEBUG nova.virt.hardware [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1265.790826] env[61215]: DEBUG nova.virt.hardware [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1265.790920] env[61215]: DEBUG nova.virt.hardware [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1265.791418] env[61215]: DEBUG 
nova.virt.hardware [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1265.791644] env[61215]: DEBUG nova.virt.hardware [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1265.791833] env[61215]: DEBUG nova.virt.hardware [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1265.793750] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0540752-3d4b-43ad-9102-b5c71bfd4ba8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.805940] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b80dea-ea33-4b9b-bd11-58039c17f87e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.818241] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Instance VIF info [] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1265.827427] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Creating folder: Project (73a0139574374714834fc05ef3084bd4). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1265.828430] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3da728ad-eb32-4a34-88b8-a32a6ba3bfa6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.839361] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Created folder: Project (73a0139574374714834fc05ef3084bd4) in parent group-v352463. [ 1265.839450] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Creating folder: Instances. Parent ref: group-v352482. 
{{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1265.839683] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-598fbe58-3250-4902-855b-9275b165b230 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.850295] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Created folder: Instances in parent group-v352482. [ 1265.850641] env[61215]: DEBUG oslo.service.loopingcall [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1265.850805] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1265.851052] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7245c858-4c39-4a41-94a6-b98e90866b7e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.871618] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1265.871618] env[61215]: value = "task-1690275" [ 1265.871618] env[61215]: _type = "Task" [ 1265.871618] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.879810] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690275, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.384192] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690275, 'name': CreateVM_Task, 'duration_secs': 0.30639} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1266.384501] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1266.384779] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1266.385155] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1266.385302] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1266.385687] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac0f7b09-ec1f-4718-8efe-25fe72d8d7a0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.391880] env[61215]: DEBUG oslo_vmware.api [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Waiting for the task: (returnval){ [ 1266.391880] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52a571bf-a46d-c7a2-cc44-a4b196e02efd" [ 1266.391880] env[61215]: _type = "Task" [ 1266.391880] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.403266] env[61215]: DEBUG oslo_vmware.api [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52a571bf-a46d-c7a2-cc44-a4b196e02efd, 'name': SearchDatastore_Task} progress is 0%. 
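The SearchDatastore_Task above probes the per-image cache directory before deciding whether the disk must be fetched; the layout visible in the lock names is "[datastore] devstack-image-cache_base/<image-id>/<image-id>.vmdk". A tiny helper reproducing just that path construction (names taken from the log, not from Nova's API):

    def cached_image_vmdk(datastore, cache_dir, image_id):
        # Datastore path of a cached image disk, as in the lock names above.
        return f"[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk"

    print(cached_image_vmdk("datastore1", "devstack-image-cache_base",
                            "e91f0c25-9ff9-4937-8440-f47cfb2028bc"))
    # [datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk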
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1266.907133] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1266.907407] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1266.907622] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1267.864114] env[61215]: DEBUG nova.network.neutron [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Successfully updated port: 921c2c9e-47cd-443e-ad1f-bfe99e75ac09 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1267.882046] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Acquiring lock "refresh_cache-67068a42-eba7-4529-9ebf-43d6865362b1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1267.882148] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Acquired lock "refresh_cache-67068a42-eba7-4529-9ebf-43d6865362b1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1267.882235] env[61215]: DEBUG nova.network.neutron [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1268.023231] env[61215]: DEBUG nova.network.neutron [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Instance cache missing network info. 
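The "refresh_cache-<instance-uuid>" acquire/release pairs are oslo.concurrency named locks: refreshes of one instance's cache are serialized, while different instances never contend. A minimal sketch of the same pattern, assuming oslo.concurrency is installed and with a hypothetical fetch_nw_info callable:

    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, fetch_nw_info, cache):
        # One lock per instance uuid, mirroring the lock names in the records.
        with lockutils.lock(f"refresh_cache-{instance_uuid}"):
            cache[instance_uuid] = fetch_nw_info(instance_uuid)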
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1268.977279] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Acquiring lock "97dae204-f706-41b5-bf9f-b320d022b2f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1268.977279] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Lock "97dae204-f706-41b5-bf9f-b320d022b2f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.994048] env[61215]: DEBUG nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1269.081515] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1269.081772] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1269.083696] env[61215]: INFO nova.compute.claims [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1269.352339] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc45e8d-0d52-41c8-8d1e-da69abcde8fb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.360263] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1086acdb-efbf-4f89-a236-40c0ba6fc8a1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.401000] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5f0666-eced-4940-a0ba-16ea111cca87 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.410203] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c39ee31-ba5d-4146-8474-10549e136321 
{{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.431650] env[61215]: DEBUG nova.compute.provider_tree [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1269.444411] env[61215]: DEBUG nova.scheduler.client.report [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1269.468348] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.386s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1269.468958] env[61215]: DEBUG nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1269.520649] env[61215]: DEBUG nova.compute.utils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1269.521977] env[61215]: DEBUG nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1269.522437] env[61215]: DEBUG nova.network.neutron [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1269.545299] env[61215]: DEBUG nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Start building block device mappings for instance. 
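"Using /dev/sd instead of None" is the device-naming fallback: when a block device mapping carries no device name, compute.utils substitutes the default /dev/sd prefix and picks the next free suffix. A simplified sketch of that idea (not the real get_next_device_name):

    import string

    def next_device_name(used, prefix="/dev/sd"):
        # First unused /dev/sdX name, walking the suffix letters in order.
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in used:
                return candidate
        raise ValueError("out of device names")

    print(next_device_name({"/dev/sda"}))  # /dev/sdb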
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1269.607208] env[61215]: DEBUG nova.network.neutron [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Updating instance_info_cache with network_info: [{"id": "921c2c9e-47cd-443e-ad1f-bfe99e75ac09", "address": "fa:16:3e:28:c7:07", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap921c2c9e-47", "ovs_interfaceid": "921c2c9e-47cd-443e-ad1f-bfe99e75ac09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1269.627288] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Releasing lock "refresh_cache-67068a42-eba7-4529-9ebf-43d6865362b1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1269.627601] env[61215]: DEBUG nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Instance network_info: |[{"id": "921c2c9e-47cd-443e-ad1f-bfe99e75ac09", "address": "fa:16:3e:28:c7:07", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap921c2c9e-47", "ovs_interfaceid": "921c2c9e-47cd-443e-ad1f-bfe99e75ac09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1269.628151] env[61215]: DEBUG 
nova.virt.vmwareapi.vmops [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:c7:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '921c2c9e-47cd-443e-ad1f-bfe99e75ac09', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1269.636579] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Creating folder: Project (c427ab1108404e5aa7ae1659794485a3). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1269.639420] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-16cd05dc-ee3c-44d2-bc74-952533eafefa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.653618] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Created folder: Project (c427ab1108404e5aa7ae1659794485a3) in parent group-v352463. [ 1269.653814] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Creating folder: Instances. Parent ref: group-v352485. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1269.654727] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b6a1d7f-55cb-42a4-aca2-ca0fbdbc0351 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.657619] env[61215]: DEBUG nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1269.668296] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Created folder: Instances in parent group-v352485. [ 1269.668522] env[61215]: DEBUG oslo.service.loopingcall [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
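The oslo.service record below shows create_vm being driven through a looping call: the loop re-runs a poll function at a fixed interval until it raises LoopingCallDone, whose payload becomes the waiter's return value. A minimal sketch of that pattern, assuming oslo.service is available and a hypothetical task object with .done/.result:

    from oslo_service import loopingcall

    def _poll(task):
        # Raise LoopingCallDone to stop the loop and hand a value back.
        if task.done:
            raise loopingcall.LoopingCallDone(task.result)

    def wait_for(task, interval=0.5):
        timer = loopingcall.FixedIntervalLoopingCall(_poll, task)
        return timer.start(interval=interval).wait()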
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1269.668718] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1269.672292] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-859bce86-fcf3-4e1a-a371-958d508649d2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.699771] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1269.699771] env[61215]: value = "task-1690278" [ 1269.699771] env[61215]: _type = "Task" [ 1269.699771] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.701956] env[61215]: DEBUG nova.virt.hardware [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1269.702623] env[61215]: DEBUG nova.virt.hardware [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1269.703454] env[61215]: DEBUG nova.virt.hardware [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1269.703454] env[61215]: DEBUG nova.virt.hardware [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1269.703454] env[61215]: DEBUG nova.virt.hardware [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1269.703454] env[61215]: DEBUG nova.virt.hardware [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1269.704318] env[61215]: DEBUG nova.virt.hardware [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1269.704318] env[61215]: DEBUG nova.virt.hardware [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1269.704318] env[61215]: DEBUG nova.virt.hardware [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1269.704318] env[61215]: DEBUG nova.virt.hardware [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1269.704318] env[61215]: DEBUG nova.virt.hardware [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1269.705062] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c81c3f-796c-41dc-83a8-5145ab451714 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.720221] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690278, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.721552] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d612efcb-ff9a-4a72-89b2-7034597a9c30 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.813847] env[61215]: DEBUG nova.policy [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b78014700fdc4e849c2711d3fc8138b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a8a305c6ce3c4e368e8a8c32d543c655', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1269.925972] env[61215]: DEBUG nova.compute.manager [req-fd27fd4c-103f-4b27-8887-a7791f6e3674 req-5cadb8b3-b53a-449b-8584-c376a2468aba service nova] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Received event network-vif-plugged-921c2c9e-47cd-443e-ad1f-bfe99e75ac09 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1269.926220] env[61215]: DEBUG oslo_concurrency.lockutils [req-fd27fd4c-103f-4b27-8887-a7791f6e3674 req-5cadb8b3-b53a-449b-8584-c376a2468aba service nova] Acquiring lock "67068a42-eba7-4529-9ebf-43d6865362b1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1269.926431] env[61215]: DEBUG oslo_concurrency.lockutils [req-fd27fd4c-103f-4b27-8887-a7791f6e3674 req-5cadb8b3-b53a-449b-8584-c376a2468aba service nova] Lock "67068a42-eba7-4529-9ebf-43d6865362b1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1269.926596] env[61215]: DEBUG oslo_concurrency.lockutils [req-fd27fd4c-103f-4b27-8887-a7791f6e3674 req-5cadb8b3-b53a-449b-8584-c376a2468aba service nova] Lock "67068a42-eba7-4529-9ebf-43d6865362b1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1269.926863] env[61215]: DEBUG nova.compute.manager [req-fd27fd4c-103f-4b27-8887-a7791f6e3674 req-5cadb8b3-b53a-449b-8584-c376a2468aba service nova] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] No waiting events found dispatching network-vif-plugged-921c2c9e-47cd-443e-ad1f-bfe99e75ac09 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1269.926999] env[61215]: WARNING nova.compute.manager [req-fd27fd4c-103f-4b27-8887-a7791f6e3674 req-5cadb8b3-b53a-449b-8584-c376a2468aba service nova] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Received unexpected event network-vif-plugged-921c2c9e-47cd-443e-ad1f-bfe99e75ac09 for instance with vm_state building and task_state spawning. 
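The records above show Nova's external-event dispatch for network-vif-plugged: the compute manager takes a per-instance "-events" lock, pops the registered waiter for the event if one exists, and otherwise logs the WARNING "Received unexpected event" seen here (the VIF plug raced ahead of the spawn path registering a waiter). A minimal sketch of that pop-or-warn pattern follows; the names (EventDispatcher, prepare_for_event, dispatch) are hypothetical and this is only an illustration of the lock-and-waiter structure visible in the log, not Nova's implementation.

    import threading

    class EventDispatcher:
        """Toy analogue of the pattern in the log: waiters are registered
        per (instance, event) under a lock; dispatching an event with no
        registered waiter is merely logged as unexpected."""

        def __init__(self):
            self._lock = threading.Lock()   # guards the waiter table
            self._waiters = {}              # (instance_id, event) -> threading.Event

        def prepare_for_event(self, instance_id, event):
            # Register the waiter *before* triggering the work that emits
            # the event (e.g. before asking Neutron to plug the VIF).
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_id, event)] = waiter
            return waiter

        def dispatch(self, instance_id, event):
            # Mirrors the pop step: take the lock, pop the waiter if any.
            with self._lock:
                waiter = self._waiters.pop((instance_id, event), None)
            if waiter is None:
                print(f"WARNING: unexpected event {event} for {instance_id}")
            else:
                waiter.set()

    if __name__ == "__main__":
        d = EventDispatcher()
        # No waiter registered yet, so this takes the "unexpected" branch,
        # like the WARNING record above for the instance still spawning.
        d.dispatch("67068a42-eba7-4529-9ebf-43d6865362b1",
                   "network-vif-plugged-921c2c9e-47cd-443e-ad1f-bfe99e75ac09")

The WARNING is harmless in this trace for the same reason the sketch prints one: the event arrived while vm_state was still building, before any waiter had been registered.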
[ 1270.219292] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690278, 'name': CreateVM_Task, 'duration_secs': 0.338273} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.219292] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1270.219292] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1270.219292] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.220874] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1270.221230] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9b67d06-5ee0-4626-8f7f-305c4ad25db3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.227598] env[61215]: DEBUG oslo_vmware.api [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Waiting for the task: (returnval){ [ 1270.227598] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52be9807-d376-d604-9c3c-eef21978f4b6" [ 1270.227598] env[61215]: _type = "Task" [ 1270.227598] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.239035] env[61215]: DEBUG oslo_vmware.api [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52be9807-d376-d604-9c3c-eef21978f4b6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.739035] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1270.739035] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1270.739035] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1272.199290] env[61215]: DEBUG nova.network.neutron [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Successfully created port: 45151659-bc1f-4b2f-9f1d-6433eff373b1 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1273.637715] env[61215]: DEBUG nova.compute.manager [req-15ba938a-c080-4ab9-ade6-ef6ae77f22c7 req-f810e44a-a7de-4c5b-969a-f457bf11f2d9 service nova] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Received event network-changed-921c2c9e-47cd-443e-ad1f-bfe99e75ac09 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1273.637991] env[61215]: DEBUG nova.compute.manager [req-15ba938a-c080-4ab9-ade6-ef6ae77f22c7 req-f810e44a-a7de-4c5b-969a-f457bf11f2d9 service nova] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Refreshing instance network info cache due to event network-changed-921c2c9e-47cd-443e-ad1f-bfe99e75ac09. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1273.638155] env[61215]: DEBUG oslo_concurrency.lockutils [req-15ba938a-c080-4ab9-ade6-ef6ae77f22c7 req-f810e44a-a7de-4c5b-969a-f457bf11f2d9 service nova] Acquiring lock "refresh_cache-67068a42-eba7-4529-9ebf-43d6865362b1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1273.638288] env[61215]: DEBUG oslo_concurrency.lockutils [req-15ba938a-c080-4ab9-ade6-ef6ae77f22c7 req-f810e44a-a7de-4c5b-969a-f457bf11f2d9 service nova] Acquired lock "refresh_cache-67068a42-eba7-4529-9ebf-43d6865362b1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.638450] env[61215]: DEBUG nova.network.neutron [req-15ba938a-c080-4ab9-ade6-ef6ae77f22c7 req-f810e44a-a7de-4c5b-969a-f457bf11f2d9 service nova] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Refreshing network info cache for port 921c2c9e-47cd-443e-ad1f-bfe99e75ac09 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1274.593184] env[61215]: DEBUG nova.network.neutron [req-15ba938a-c080-4ab9-ade6-ef6ae77f22c7 req-f810e44a-a7de-4c5b-969a-f457bf11f2d9 service nova] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Updated VIF entry in instance network info cache for port 921c2c9e-47cd-443e-ad1f-bfe99e75ac09. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1274.593184] env[61215]: DEBUG nova.network.neutron [req-15ba938a-c080-4ab9-ade6-ef6ae77f22c7 req-f810e44a-a7de-4c5b-969a-f457bf11f2d9 service nova] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Updating instance_info_cache with network_info: [{"id": "921c2c9e-47cd-443e-ad1f-bfe99e75ac09", "address": "fa:16:3e:28:c7:07", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.225", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap921c2c9e-47", "ovs_interfaceid": "921c2c9e-47cd-443e-ad1f-bfe99e75ac09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.604740] env[61215]: DEBUG oslo_concurrency.lockutils [req-15ba938a-c080-4ab9-ade6-ef6ae77f22c7 req-f810e44a-a7de-4c5b-969a-f457bf11f2d9 service nova] Releasing lock "refresh_cache-67068a42-eba7-4529-9ebf-43d6865362b1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1276.029829] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Acquiring lock 
"82698789-4c08-453b-a973-1916d1f94af6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.030417] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Lock "82698789-4c08-453b-a973-1916d1f94af6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.043058] env[61215]: DEBUG nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1276.130825] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.131139] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1276.132779] env[61215]: INFO nova.compute.claims [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1276.412789] env[61215]: DEBUG nova.network.neutron [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Successfully updated port: 45151659-bc1f-4b2f-9f1d-6433eff373b1 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1276.417895] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d02f82-76f6-4463-bde8-3e99f8677747 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.426723] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Acquiring lock "refresh_cache-97dae204-f706-41b5-bf9f-b320d022b2f3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1276.427039] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 
tempest-VolumesAdminNegativeTest-835700900-project-member] Acquired lock "refresh_cache-97dae204-f706-41b5-bf9f-b320d022b2f3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.427451] env[61215]: DEBUG nova.network.neutron [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1276.433427] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb8970f-7046-458d-9bf1-1fd713804f18 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.473108] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd48f9d7-3f60-42c5-a627-19933eb678cd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.482093] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f17eda3-ae08-4e4b-9f2c-c844d4182cd9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.498103] env[61215]: DEBUG nova.compute.provider_tree [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.512074] env[61215]: DEBUG nova.scheduler.client.report [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1276.539017] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.406s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1276.539017] env[61215]: DEBUG nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1276.576951] env[61215]: DEBUG nova.compute.utils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1276.578277] env[61215]: DEBUG nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1276.578453] env[61215]: DEBUG nova.network.neutron [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1276.591558] env[61215]: DEBUG nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1276.601197] env[61215]: DEBUG nova.network.neutron [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1276.671225] env[61215]: DEBUG nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1276.699171] env[61215]: DEBUG nova.virt.hardware [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1276.699430] env[61215]: DEBUG nova.virt.hardware [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1276.699658] env[61215]: DEBUG nova.virt.hardware [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1276.699773] env[61215]: DEBUG nova.virt.hardware [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1276.699930] env[61215]: DEBUG nova.virt.hardware [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1276.700679] env[61215]: DEBUG nova.virt.hardware [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1276.701118] env[61215]: DEBUG nova.virt.hardware [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1276.701359] env[61215]: DEBUG nova.virt.hardware [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1276.701578] env[61215]: DEBUG nova.virt.hardware [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1276.701788] env[61215]: DEBUG nova.virt.hardware [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1276.702029] env[61215]: DEBUG nova.virt.hardware [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1276.702910] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14a0257-e1ae-4ab2-a942-0a0fa16473c8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.712616] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a759fbd-db4a-4286-8c0f-ea6a52b82ffc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.742811] env[61215]: DEBUG nova.policy [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '417690a3e4404314a9f59d5455c54f3d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2fcfa7a634a74f0caeb5fa1e6a2c2bee', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1277.766625] env[61215]: DEBUG nova.network.neutron [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Updating instance_info_cache with network_info: [{"id": "45151659-bc1f-4b2f-9f1d-6433eff373b1", "address": "fa:16:3e:a0:33:86", "network": {"id": "34848189-999b-428a-bb01-fa9bec21457c", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1767614235-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8a305c6ce3c4e368e8a8c32d543c655", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45151659-bc", "ovs_interfaceid": "45151659-bc1f-4b2f-9f1d-6433eff373b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.785867] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Releasing lock "refresh_cache-97dae204-f706-41b5-bf9f-b320d022b2f3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1277.785867] env[61215]: DEBUG nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Instance network_info: |[{"id": "45151659-bc1f-4b2f-9f1d-6433eff373b1", "address": "fa:16:3e:a0:33:86", "network": {"id": "34848189-999b-428a-bb01-fa9bec21457c", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1767614235-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8a305c6ce3c4e368e8a8c32d543c655", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45151659-bc", "ovs_interfaceid": "45151659-bc1f-4b2f-9f1d-6433eff373b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1277.786434] env[61215]: DEBUG nova.network.neutron [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Successfully created port: 0126bfe1-a03e-41f9-9df8-72bf1cd07a2f {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1277.790846] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:33:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dcf5c3f7-4e33-4f21-b323-3673930b789c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45151659-bc1f-4b2f-9f1d-6433eff373b1', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1277.803205] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Creating folder: Project (a8a305c6ce3c4e368e8a8c32d543c655). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1277.804718] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e3c5287-ab9f-49d0-9e89-f23aa786c85a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.821556] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Created folder: Project (a8a305c6ce3c4e368e8a8c32d543c655) in parent group-v352463. [ 1277.821992] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Creating folder: Instances. Parent ref: group-v352488. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1277.822599] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b24c1ea3-1bd1-4961-8063-1c94e5688624 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.841015] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Created folder: Instances in parent group-v352488. [ 1277.841015] env[61215]: DEBUG oslo.service.loopingcall [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1277.841015] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1277.841015] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f81a2c5-90ae-4ed0-a19d-c64c4f3f423f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.870152] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1277.870152] env[61215]: value = "task-1690281" [ 1277.870152] env[61215]: _type = "Task" [ 1277.870152] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.879207] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690281, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.227932] env[61215]: DEBUG nova.compute.manager [req-ae81ca10-342a-400a-900b-43b28948bf89 req-0e56a7a0-6041-415b-acd6-6f8668490166 service nova] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Received event network-vif-plugged-45151659-bc1f-4b2f-9f1d-6433eff373b1 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1278.229603] env[61215]: DEBUG oslo_concurrency.lockutils [req-ae81ca10-342a-400a-900b-43b28948bf89 req-0e56a7a0-6041-415b-acd6-6f8668490166 service nova] Acquiring lock "97dae204-f706-41b5-bf9f-b320d022b2f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1278.229603] env[61215]: DEBUG oslo_concurrency.lockutils [req-ae81ca10-342a-400a-900b-43b28948bf89 req-0e56a7a0-6041-415b-acd6-6f8668490166 service nova] Lock "97dae204-f706-41b5-bf9f-b320d022b2f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1278.229603] env[61215]: DEBUG oslo_concurrency.lockutils [req-ae81ca10-342a-400a-900b-43b28948bf89 req-0e56a7a0-6041-415b-acd6-6f8668490166 service nova] Lock "97dae204-f706-41b5-bf9f-b320d022b2f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.229603] env[61215]: DEBUG nova.compute.manager [req-ae81ca10-342a-400a-900b-43b28948bf89 req-0e56a7a0-6041-415b-acd6-6f8668490166 service nova] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] No waiting events found dispatching network-vif-plugged-45151659-bc1f-4b2f-9f1d-6433eff373b1 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1278.229790] env[61215]: WARNING nova.compute.manager [req-ae81ca10-342a-400a-900b-43b28948bf89 req-0e56a7a0-6041-415b-acd6-6f8668490166 service nova] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Received unexpected event network-vif-plugged-45151659-bc1f-4b2f-9f1d-6433eff373b1 for instance with vm_state building and task_state spawning. [ 1278.389420] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690281, 'name': CreateVM_Task, 'duration_secs': 0.366242} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.389833] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1278.390899] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.391329] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.391933] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1278.394069] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bcfff24-81ac-490b-b741-9163a48e01dc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.399657] env[61215]: DEBUG oslo_vmware.api [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Waiting for the task: (returnval){ [ 1278.399657] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]529b7375-7e7e-40b4-ba31-1a6a5c7cd24b" [ 1278.399657] env[61215]: _type = "Task" [ 1278.399657] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.409718] env[61215]: DEBUG oslo_vmware.api [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]529b7375-7e7e-40b4-ba31-1a6a5c7cd24b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.910059] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1278.910363] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1278.910538] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1279.937658] env[61215]: DEBUG nova.network.neutron [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Successfully updated port: 0126bfe1-a03e-41f9-9df8-72bf1cd07a2f {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1279.949890] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Acquiring lock "refresh_cache-82698789-4c08-453b-a973-1916d1f94af6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1279.950049] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Acquired lock "refresh_cache-82698789-4c08-453b-a973-1916d1f94af6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.950207] env[61215]: DEBUG nova.network.neutron [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1280.074173] env[61215]: DEBUG nova.network.neutron [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1280.660696] env[61215]: DEBUG nova.network.neutron [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Updating instance_info_cache with network_info: [{"id": "0126bfe1-a03e-41f9-9df8-72bf1cd07a2f", "address": "fa:16:3e:f2:33:2e", "network": {"id": "a4598ef8-3be2-4e22-85ff-5928842ca89f", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2107526397-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fcfa7a634a74f0caeb5fa1e6a2c2bee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0126bfe1-a0", "ovs_interfaceid": "0126bfe1-a03e-41f9-9df8-72bf1cd07a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.686820] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Releasing lock "refresh_cache-82698789-4c08-453b-a973-1916d1f94af6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1280.687148] env[61215]: DEBUG nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Instance network_info: |[{"id": "0126bfe1-a03e-41f9-9df8-72bf1cd07a2f", "address": "fa:16:3e:f2:33:2e", "network": {"id": "a4598ef8-3be2-4e22-85ff-5928842ca89f", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2107526397-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fcfa7a634a74f0caeb5fa1e6a2c2bee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0126bfe1-a0", "ovs_interfaceid": "0126bfe1-a03e-41f9-9df8-72bf1cd07a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1280.687570] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:33:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1e7a4976-597e-4636-990e-6062b5faadee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0126bfe1-a03e-41f9-9df8-72bf1cd07a2f', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1280.695904] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Creating folder: Project (2fcfa7a634a74f0caeb5fa1e6a2c2bee). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1280.697621] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9363411a-0040-4258-8ea5-cd6b957feef0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.702427] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Acquiring lock "0223d7b6-12e1-4418-97f2-012ed41daa7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.702742] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Lock "0223d7b6-12e1-4418-97f2-012ed41daa7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.716228] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Created folder: Project (2fcfa7a634a74f0caeb5fa1e6a2c2bee) in parent group-v352463. [ 1280.716228] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Creating folder: Instances. Parent ref: group-v352491. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1280.716228] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ea6c40c-e9e7-4a6e-8004-0503623e9020 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.728009] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Created folder: Instances in parent group-v352491. 
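The surrounding records show the CreateVM_Task lifecycle: Folder.CreateVM_Task is invoked, oslo.vmware's wait_for_task polls the task object ("progress is 0%" ... "progress is 6%"), and the record is closed with "completed successfully" plus a duration_secs once the task reaches a terminal state. Below is a minimal sketch of that poll-until-terminal pattern; get_task_info is a hypothetical stub standing in for the vSphere-side task read (oslo.vmware does this with a PropertyCollector query), so this illustrates the polling loop only, not the library's code.

    import time

    # Hypothetical stand-in for reading task state from the vSphere API;
    # it fakes two "running" polls and then reports success.
    def get_task_info(task_ref, _state={"n": 0}):
        _state["n"] += 1
        if _state["n"] < 3:
            return {"state": "running", "progress": _state["n"] * 30}
        return {"state": "success", "progress": 100}

    def wait_for_task(task_ref, interval=0.5):
        """Poll a task until it is terminal, logging progress the way the
        records above do ('progress is N%.' ... 'completed successfully')."""
        start = time.monotonic()
        while True:
            info = get_task_info(task_ref)
            if info["state"] == "running":
                print(f"Task {task_ref} progress is {info['progress']}%.")
                time.sleep(interval)
                continue
            duration = time.monotonic() - start
            if info["state"] == "success":
                print(f"Task {task_ref} completed successfully "
                      f"(duration_secs: {duration:.6f}).")
                return info
            raise RuntimeError(f"Task {task_ref} failed: {info}")

    if __name__ == "__main__":
        wait_for_task("task-1690284")

The fixed-interval poll is why the trace shows a progress record roughly every half second and why duration_secs (0.331434 for task-1690284 below) is only known when the final poll observes the terminal state.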
[ 1280.728822] env[61215]: DEBUG oslo.service.loopingcall [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1280.730027] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1280.730448] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47d1560e-4c4c-419f-813c-904db1ccc178 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.757774] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1280.757774] env[61215]: value = "task-1690284" [ 1280.757774] env[61215]: _type = "Task" [ 1280.757774] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.767695] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690284, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.834380] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquiring lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.834636] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.271490] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690284, 'name': CreateVM_Task, 'duration_secs': 0.331434} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.272052] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1281.272939] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1281.273274] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1281.273705] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1281.275016] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-823c7607-8746-4607-b4a0-f1fd18ab6594 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.280055] env[61215]: DEBUG oslo_vmware.api [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Waiting for the task: (returnval){ [ 1281.280055] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e5d228-a5a7-00ae-d5fd-2a82f2829fdd" [ 1281.280055] env[61215]: _type = "Task" [ 1281.280055] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.290309] env[61215]: DEBUG oslo_vmware.api [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e5d228-a5a7-00ae-d5fd-2a82f2829fdd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.570739] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Acquiring lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1281.570881] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1281.796113] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1281.796503] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1281.796730] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1282.133656] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Acquiring lock "d49f702b-cd29-4491-938c-0291b351ef20" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.133986] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Lock "d49f702b-cd29-4491-938c-0291b351ef20" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.861796] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f97f9e6a-c3ff-48ca-9eae-811e2bdd73d5 tempest-ServersAdminTestJSON-1520134115 tempest-ServersAdminTestJSON-1520134115-project-member] 
Acquiring lock "44f29d5d-46d6-433a-972b-f971a04200e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.862342] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f97f9e6a-c3ff-48ca-9eae-811e2bdd73d5 tempest-ServersAdminTestJSON-1520134115 tempest-ServersAdminTestJSON-1520134115-project-member] Lock "44f29d5d-46d6-433a-972b-f971a04200e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.111635] env[61215]: DEBUG nova.compute.manager [req-86dad3b4-6f26-4bc6-ad2b-3e1536e8c688 req-75f5e6e9-6700-4acc-b4cc-22cf094b6500 service nova] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Received event network-vif-plugged-0126bfe1-a03e-41f9-9df8-72bf1cd07a2f {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1283.111852] env[61215]: DEBUG oslo_concurrency.lockutils [req-86dad3b4-6f26-4bc6-ad2b-3e1536e8c688 req-75f5e6e9-6700-4acc-b4cc-22cf094b6500 service nova] Acquiring lock "82698789-4c08-453b-a973-1916d1f94af6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1283.113817] env[61215]: DEBUG oslo_concurrency.lockutils [req-86dad3b4-6f26-4bc6-ad2b-3e1536e8c688 req-75f5e6e9-6700-4acc-b4cc-22cf094b6500 service nova] Lock "82698789-4c08-453b-a973-1916d1f94af6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1283.113817] env[61215]: DEBUG oslo_concurrency.lockutils [req-86dad3b4-6f26-4bc6-ad2b-3e1536e8c688 req-75f5e6e9-6700-4acc-b4cc-22cf094b6500 service nova] Lock "82698789-4c08-453b-a973-1916d1f94af6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1283.113817] env[61215]: DEBUG nova.compute.manager [req-86dad3b4-6f26-4bc6-ad2b-3e1536e8c688 req-75f5e6e9-6700-4acc-b4cc-22cf094b6500 service nova] [instance: 82698789-4c08-453b-a973-1916d1f94af6] No waiting events found dispatching network-vif-plugged-0126bfe1-a03e-41f9-9df8-72bf1cd07a2f {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1283.113817] env[61215]: WARNING nova.compute.manager [req-86dad3b4-6f26-4bc6-ad2b-3e1536e8c688 req-75f5e6e9-6700-4acc-b4cc-22cf094b6500 service nova] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Received unexpected event network-vif-plugged-0126bfe1-a03e-41f9-9df8-72bf1cd07a2f for instance with vm_state building and task_state spawning. 
[ 1283.515689] env[61215]: DEBUG nova.compute.manager [req-49ca1461-bcf0-4aac-be8e-0d56073464e7 req-62c66e8a-360c-4058-b86a-63d18da8a174 service nova] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Received event network-changed-45151659-bc1f-4b2f-9f1d-6433eff373b1 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1283.515689] env[61215]: DEBUG nova.compute.manager [req-49ca1461-bcf0-4aac-be8e-0d56073464e7 req-62c66e8a-360c-4058-b86a-63d18da8a174 service nova] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Refreshing instance network info cache due to event network-changed-45151659-bc1f-4b2f-9f1d-6433eff373b1. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1283.515689] env[61215]: DEBUG oslo_concurrency.lockutils [req-49ca1461-bcf0-4aac-be8e-0d56073464e7 req-62c66e8a-360c-4058-b86a-63d18da8a174 service nova] Acquiring lock "refresh_cache-97dae204-f706-41b5-bf9f-b320d022b2f3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1283.515689] env[61215]: DEBUG oslo_concurrency.lockutils [req-49ca1461-bcf0-4aac-be8e-0d56073464e7 req-62c66e8a-360c-4058-b86a-63d18da8a174 service nova] Acquired lock "refresh_cache-97dae204-f706-41b5-bf9f-b320d022b2f3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.515689] env[61215]: DEBUG nova.network.neutron [req-49ca1461-bcf0-4aac-be8e-0d56073464e7 req-62c66e8a-360c-4058-b86a-63d18da8a174 service nova] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Refreshing network info cache for port 45151659-bc1f-4b2f-9f1d-6433eff373b1 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1284.346824] env[61215]: DEBUG nova.network.neutron [req-49ca1461-bcf0-4aac-be8e-0d56073464e7 req-62c66e8a-360c-4058-b86a-63d18da8a174 service nova] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Updated VIF entry in instance network info cache for port 45151659-bc1f-4b2f-9f1d-6433eff373b1. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1284.347204] env[61215]: DEBUG nova.network.neutron [req-49ca1461-bcf0-4aac-be8e-0d56073464e7 req-62c66e8a-360c-4058-b86a-63d18da8a174 service nova] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Updating instance_info_cache with network_info: [{"id": "45151659-bc1f-4b2f-9f1d-6433eff373b1", "address": "fa:16:3e:a0:33:86", "network": {"id": "34848189-999b-428a-bb01-fa9bec21457c", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1767614235-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a8a305c6ce3c4e368e8a8c32d543c655", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45151659-bc", "ovs_interfaceid": "45151659-bc1f-4b2f-9f1d-6433eff373b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.364535] env[61215]: DEBUG oslo_concurrency.lockutils [req-49ca1461-bcf0-4aac-be8e-0d56073464e7 req-62c66e8a-360c-4058-b86a-63d18da8a174 service nova] Releasing lock "refresh_cache-97dae204-f706-41b5-bf9f-b320d022b2f3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1284.415210] env[61215]: DEBUG oslo_concurrency.lockutils [None req-dc95e430-386a-4245-b0cb-b9ebca5ef646 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquiring lock "8d807f25-01ea-42b2-b5ed-b8ff2b6c39ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1284.415210] env[61215]: DEBUG oslo_concurrency.lockutils [None req-dc95e430-386a-4245-b0cb-b9ebca5ef646 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "8d807f25-01ea-42b2-b5ed-b8ff2b6c39ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1284.804987] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8c813b26-2396-4945-8d07-29e004608a22 tempest-ServersTestManualDisk-677728471 tempest-ServersTestManualDisk-677728471-project-member] Acquiring lock "0ad2b135-a40e-4353-a524-1d66435197bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1284.805250] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8c813b26-2396-4945-8d07-29e004608a22 tempest-ServersTestManualDisk-677728471 
tempest-ServersTestManualDisk-677728471-project-member] Lock "0ad2b135-a40e-4353-a524-1d66435197bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1285.293455] env[61215]: DEBUG oslo_concurrency.lockutils [None req-388f4f53-e221-4c0f-b93f-56afe40b7fb2 tempest-SecurityGroupsTestJSON-1182727218 tempest-SecurityGroupsTestJSON-1182727218-project-member] Acquiring lock "7d9fbfe1-a62e-41e2-8736-61b2f895598d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1285.293671] env[61215]: DEBUG oslo_concurrency.lockutils [None req-388f4f53-e221-4c0f-b93f-56afe40b7fb2 tempest-SecurityGroupsTestJSON-1182727218 tempest-SecurityGroupsTestJSON-1182727218-project-member] Lock "7d9fbfe1-a62e-41e2-8736-61b2f895598d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1285.924842] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b4d758fa-00fe-4978-b44f-555600c0d47c tempest-ServersAdminTestJSON-1520134115 tempest-ServersAdminTestJSON-1520134115-project-member] Acquiring lock "389bf40e-6d3f-4b37-a6a7-6b18a9281da7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1285.925234] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b4d758fa-00fe-4978-b44f-555600c0d47c tempest-ServersAdminTestJSON-1520134115 tempest-ServersAdminTestJSON-1520134115-project-member] Lock "389bf40e-6d3f-4b37-a6a7-6b18a9281da7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.909392] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3be27988-e200-47ae-a99e-eac1ae81b962 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquiring lock "666eca1f-edf0-445d-99f3-428547f01746" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.909645] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3be27988-e200-47ae-a99e-eac1ae81b962 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "666eca1f-edf0-445d-99f3-428547f01746" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1286.968170] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] Acquiring lock "9f4a9d38-7536-4804-9fde-0b14a18999b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" 
{{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1286.968468] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] Lock "9f4a9d38-7536-4804-9fde-0b14a18999b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.007194] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] Acquiring lock "d99f8ee6-768f-4775-b07e-c84536e7f659" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.007520] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] Lock "d99f8ee6-768f-4775-b07e-c84536e7f659" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.053436] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] Acquiring lock "1d14c483-5775-4eda-9173-67b02bd97889" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.054070] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] Lock "1d14c483-5775-4eda-9173-67b02bd97889" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1287.211613] env[61215]: DEBUG nova.compute.manager [req-a7523dc2-c6b1-4e0b-844e-af9f65352e3c req-5fe5eee9-28ff-4e35-a0f7-bfa210d80be0 service nova] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Received event network-changed-0126bfe1-a03e-41f9-9df8-72bf1cd07a2f {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1287.211613] env[61215]: DEBUG nova.compute.manager [req-a7523dc2-c6b1-4e0b-844e-af9f65352e3c req-5fe5eee9-28ff-4e35-a0f7-bfa210d80be0 service nova] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Refreshing instance network info cache due to event network-changed-0126bfe1-a03e-41f9-9df8-72bf1cd07a2f. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1287.211613] env[61215]: DEBUG oslo_concurrency.lockutils [req-a7523dc2-c6b1-4e0b-844e-af9f65352e3c req-5fe5eee9-28ff-4e35-a0f7-bfa210d80be0 service nova] Acquiring lock "refresh_cache-82698789-4c08-453b-a973-1916d1f94af6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1287.211613] env[61215]: DEBUG oslo_concurrency.lockutils [req-a7523dc2-c6b1-4e0b-844e-af9f65352e3c req-5fe5eee9-28ff-4e35-a0f7-bfa210d80be0 service nova] Acquired lock "refresh_cache-82698789-4c08-453b-a973-1916d1f94af6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.211613] env[61215]: DEBUG nova.network.neutron [req-a7523dc2-c6b1-4e0b-844e-af9f65352e3c req-5fe5eee9-28ff-4e35-a0f7-bfa210d80be0 service nova] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Refreshing network info cache for port 0126bfe1-a03e-41f9-9df8-72bf1cd07a2f {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1287.721445] env[61215]: DEBUG nova.network.neutron [req-a7523dc2-c6b1-4e0b-844e-af9f65352e3c req-5fe5eee9-28ff-4e35-a0f7-bfa210d80be0 service nova] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Updated VIF entry in instance network info cache for port 0126bfe1-a03e-41f9-9df8-72bf1cd07a2f. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1287.721793] env[61215]: DEBUG nova.network.neutron [req-a7523dc2-c6b1-4e0b-844e-af9f65352e3c req-5fe5eee9-28ff-4e35-a0f7-bfa210d80be0 service nova] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Updating instance_info_cache with network_info: [{"id": "0126bfe1-a03e-41f9-9df8-72bf1cd07a2f", "address": "fa:16:3e:f2:33:2e", "network": {"id": "a4598ef8-3be2-4e22-85ff-5928842ca89f", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-2107526397-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fcfa7a634a74f0caeb5fa1e6a2c2bee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0126bfe1-a0", "ovs_interfaceid": "0126bfe1-a03e-41f9-9df8-72bf1cd07a2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.737896] env[61215]: DEBUG oslo_concurrency.lockutils [req-a7523dc2-c6b1-4e0b-844e-af9f65352e3c req-5fe5eee9-28ff-4e35-a0f7-bfa210d80be0 service nova] Releasing lock "refresh_cache-82698789-4c08-453b-a973-1916d1f94af6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1289.314259] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74e4cba9-3320-497e-8550-20a7981f58e3 tempest-AttachVolumeNegativeTest-1117439275 
tempest-AttachVolumeNegativeTest-1117439275-project-member] Acquiring lock "f30dda39-422f-433d-9684-f2c7486271fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1289.314510] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74e4cba9-3320-497e-8550-20a7981f58e3 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "f30dda39-422f-433d-9684-f2c7486271fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1291.184300] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c5a0e0e6-66f3-4502-9c80-273693e532e2 tempest-AttachInterfacesUnderV243Test-1052085376 tempest-AttachInterfacesUnderV243Test-1052085376-project-member] Acquiring lock "49d99eb4-905c-409c-97e2-001801f61b38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1291.184611] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c5a0e0e6-66f3-4502-9c80-273693e532e2 tempest-AttachInterfacesUnderV243Test-1052085376 tempest-AttachInterfacesUnderV243Test-1052085376-project-member] Lock "49d99eb4-905c-409c-97e2-001801f61b38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.704512] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b4dbf638-ec9f-4a92-aad5-bb49f7954b93 tempest-FloatingIPsAssociationTestJSON-841876999 tempest-FloatingIPsAssociationTestJSON-841876999-project-member] Acquiring lock "1e53c769-1b6e-4e9b-805d-9ef8d8db4813" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.705514] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b4dbf638-ec9f-4a92-aad5-bb49f7954b93 tempest-FloatingIPsAssociationTestJSON-841876999 tempest-FloatingIPsAssociationTestJSON-841876999-project-member] Lock "1e53c769-1b6e-4e9b-805d-9ef8d8db4813" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.839615] env[61215]: DEBUG oslo_concurrency.lockutils [None req-93631d84-ddd4-4b4f-b8f3-7d004f6de9aa tempest-ServersAdmin275Test-68190844 tempest-ServersAdmin275Test-68190844-project-member] Acquiring lock "846e6d6a-dc09-4b7e-81d2-3d2023d945c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.839615] env[61215]: DEBUG oslo_concurrency.lockutils [None req-93631d84-ddd4-4b4f-b8f3-7d004f6de9aa tempest-ServersAdmin275Test-68190844 tempest-ServersAdmin275Test-68190844-project-member] Lock "846e6d6a-dc09-4b7e-81d2-3d2023d945c5" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.604690] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8b386171-4c7c-4fc6-80d4-6c4db91b7f6c tempest-InstanceActionsTestJSON-1893819667 tempest-InstanceActionsTestJSON-1893819667-project-member] Acquiring lock "1678b6d3-d4ef-4497-a240-b43a4837d9d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.604980] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8b386171-4c7c-4fc6-80d4-6c4db91b7f6c tempest-InstanceActionsTestJSON-1893819667 tempest-InstanceActionsTestJSON-1893819667-project-member] Lock "1678b6d3-d4ef-4497-a240-b43a4837d9d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.093330] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9911a0e8-8a3e-48e2-a548-b520b42ab8f3 tempest-ServerActionsTestOtherB-954961186 tempest-ServerActionsTestOtherB-954961186-project-member] Acquiring lock "02249c8d-c2b8-4e58-87eb-aecab70177bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.093660] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9911a0e8-8a3e-48e2-a548-b520b42ab8f3 tempest-ServerActionsTestOtherB-954961186 tempest-ServerActionsTestOtherB-954961186-project-member] Lock "02249c8d-c2b8-4e58-87eb-aecab70177bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.177614] env[61215]: WARNING oslo_vmware.rw_handles [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1302.177614] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1302.177614] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1302.177614] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1302.177614] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1302.177614] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 1302.177614] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1302.177614] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1302.177614] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1302.177614] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1302.177614] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed 
connection without response [ 1302.177614] env[61215]: ERROR oslo_vmware.rw_handles [ 1302.178226] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/95b31884-c35e-4d3c-a7a8-61f3907c5c61/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1302.179577] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1302.179812] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Copying Virtual Disk [datastore1] vmware_temp/95b31884-c35e-4d3c-a7a8-61f3907c5c61/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/95b31884-c35e-4d3c-a7a8-61f3907c5c61/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1302.180106] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17881a6b-0f9c-4823-a1dd-17c30df47b6f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.188206] env[61215]: DEBUG oslo_vmware.api [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Waiting for the task: (returnval){ [ 1302.188206] env[61215]: value = "task-1690285" [ 1302.188206] env[61215]: _type = "Task" [ 1302.188206] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.196680] env[61215]: DEBUG oslo_vmware.api [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Task: {'id': task-1690285, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.707019] env[61215]: DEBUG oslo_vmware.exceptions [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Fault InvalidArgument not matched. 
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1302.707019] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1302.709025] env[61215]: ERROR nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1302.709025] env[61215]: Faults: ['InvalidArgument'] [ 1302.709025] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Traceback (most recent call last): [ 1302.709025] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1302.709025] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] yield resources [ 1302.709025] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1302.709025] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] self.driver.spawn(context, instance, image_meta, [ 1302.709025] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1302.709025] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1302.709025] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1302.709025] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] self._fetch_image_if_missing(context, vi) [ 1302.709025] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1302.709446] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] image_cache(vi, tmp_image_ds_loc) [ 1302.709446] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1302.709446] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] vm_util.copy_virtual_disk( [ 1302.709446] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1302.709446] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] session._wait_for_task(vmdk_copy_task) [ 1302.709446] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1302.709446] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] return self.wait_for_task(task_ref) [ 1302.709446] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1302.709446] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] return evt.wait() [ 1302.709446] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1302.709446] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] result = hub.switch() [ 1302.709446] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1302.709446] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] return self.greenlet.switch() [ 1302.709797] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1302.709797] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] self.f(*self.args, **self.kw) [ 1302.709797] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1302.709797] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] raise exceptions.translate_fault(task_info.error) [ 1302.709797] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1302.709797] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Faults: ['InvalidArgument'] [ 1302.709797] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] [ 1302.712891] env[61215]: INFO nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Terminating instance [ 1302.712891] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.714981] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1302.714981] env[61215]: DEBUG nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 
tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1302.715309] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1302.715675] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-42f381de-016c-41cf-aceb-bf8f4418c338 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.719819] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae96b37d-6269-4c8f-849b-689957f3d396 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.728288] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1302.728725] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76fed0b4-987e-4415-bb88-e2dae2e6bb14 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.731294] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1302.733939] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1302.733939] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7a680f2-da5b-4b41-90c3-c83d940c86de {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.745450] env[61215]: DEBUG oslo_vmware.api [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Waiting for the task: (returnval){ [ 1302.745450] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]522180c2-c284-7726-5792-43ca884fe040" [ 1302.745450] env[61215]: _type = "Task" [ 1302.745450] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.751716] env[61215]: DEBUG oslo_vmware.api [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]522180c2-c284-7726-5792-43ca884fe040, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.825251] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1302.825251] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1302.825251] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Deleting the datastore file [datastore1] 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1302.825664] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6121d696-acd5-4086-bc4b-66c5d3c25b82 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.838051] env[61215]: DEBUG oslo_vmware.api [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Waiting for the task: (returnval){ [ 1302.838051] env[61215]: value = "task-1690287" [ 1302.838051] env[61215]: _type = "Task" [ 1302.838051] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.845182] env[61215]: DEBUG oslo_vmware.api [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Task: {'id': task-1690287, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.254201] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1303.255795] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Creating directory with path [datastore1] vmware_temp/e7eb003b-c2d4-4ede-b4b7-8b357643302b/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1303.256283] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f28d3d01-acaf-437b-8faf-91313787f31d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.269996] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Created directory with path [datastore1] vmware_temp/e7eb003b-c2d4-4ede-b4b7-8b357643302b/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1303.269996] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Fetch image to [datastore1] vmware_temp/e7eb003b-c2d4-4ede-b4b7-8b357643302b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1303.270387] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/e7eb003b-c2d4-4ede-b4b7-8b357643302b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1303.273123] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e748903-e7ac-41a2-a40a-a684f8309097 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.282840] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8603ab9e-0df7-4c1e-9c3d-8501454a1d55 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.292777] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6e694f-b729-4754-96d6-dfe39eb9ebeb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.324855] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3014ab85-9935-4303-8edd-f173063d2959 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.331426] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-562b1317-7fa2-41f0-8bf9-f15f6bd84d31 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.344866] env[61215]: DEBUG oslo_vmware.api [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Task: {'id': task-1690287, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071662} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.345207] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1303.345447] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1303.345676] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1303.346129] env[61215]: INFO nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 1303.348467] env[61215]: DEBUG nova.compute.claims [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1303.348601] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.348816] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1303.422921] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1303.504161] env[61215]: DEBUG oslo_vmware.rw_handles [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e7eb003b-c2d4-4ede-b4b7-8b357643302b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1303.567617] env[61215]: DEBUG oslo_vmware.rw_handles [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1303.567617] env[61215]: DEBUG oslo_vmware.rw_handles [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e7eb003b-c2d4-4ede-b4b7-8b357643302b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1303.954019] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce284ef-d791-4f74-9aa7-a5680b7f7b7d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.962418] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbe8818-5cf8-4646-a7e1-52095a51d2aa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.991889] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236f3acc-aecc-401e-847c-d192c2f266ef {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.999731] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c774b611-0743-46ee-ac59-d0c79d5fe2d9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.013368] env[61215]: DEBUG nova.compute.provider_tree [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1304.027568] env[61215]: DEBUG nova.scheduler.client.report [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1304.054666] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.705s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.055292] env[61215]: ERROR nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1304.055292] env[61215]: Faults: ['InvalidArgument'] [ 1304.055292] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Traceback (most recent call last): [ 1304.055292] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1304.055292] 
env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] self.driver.spawn(context, instance, image_meta, [ 1304.055292] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1304.055292] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1304.055292] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1304.055292] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] self._fetch_image_if_missing(context, vi) [ 1304.055292] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1304.055292] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] image_cache(vi, tmp_image_ds_loc) [ 1304.055292] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1304.056485] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] vm_util.copy_virtual_disk( [ 1304.056485] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1304.056485] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] session._wait_for_task(vmdk_copy_task) [ 1304.056485] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1304.056485] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] return self.wait_for_task(task_ref) [ 1304.056485] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1304.056485] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] return evt.wait() [ 1304.056485] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1304.056485] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] result = hub.switch() [ 1304.056485] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1304.056485] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] return self.greenlet.switch() [ 1304.056485] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1304.056485] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] self.f(*self.args, **self.kw) [ 1304.056977] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1304.056977] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] raise exceptions.translate_fault(task_info.error) [ 1304.056977] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1304.056977] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Faults: ['InvalidArgument'] [ 1304.056977] env[61215]: ERROR nova.compute.manager [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] [ 1304.056977] env[61215]: DEBUG nova.compute.utils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1304.059701] env[61215]: DEBUG nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Build of instance 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028 was re-scheduled: A specified parameter was not correct: fileType [ 1304.059701] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1304.059701] env[61215]: DEBUG nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1304.059701] env[61215]: DEBUG nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1304.060032] env[61215]: DEBUG nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1304.060032] env[61215]: DEBUG nova.network.neutron [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1304.631462] env[61215]: DEBUG nova.network.neutron [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.648368] env[61215]: INFO nova.compute.manager [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] [instance: 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028] Took 0.59 seconds to deallocate network for instance. [ 1304.772592] env[61215]: INFO nova.scheduler.client.report [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Deleted allocations for instance 2a605c78-bef9-44f0-bcd0-e5bd3e1b0028 [ 1304.803792] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18eaedc4-775c-4ea4-9aa0-965ccfce4d7c tempest-AttachInterfacesV270Test-535805287 tempest-AttachInterfacesV270Test-535805287-project-member] Lock "2a605c78-bef9-44f0-bcd0-e5bd3e1b0028" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.428s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.853479] env[61215]: DEBUG nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1304.925330] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.925605] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.927250] env[61215]: INFO nova.compute.claims [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1305.463531] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce70b07-bebf-4244-8189-72cac0f18ffb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.472851] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c60c7a-8dd8-4881-a357-86a4ac2c1164 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.506421] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e26292-b26e-4cd8-86ee-fc453f5974d6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.514654] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11285a0-ab85-4097-ad91-f22b603d5832 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.529507] env[61215]: DEBUG nova.compute.provider_tree [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1305.545323] env[61215]: DEBUG nova.scheduler.client.report [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1305.566677] env[61215]: DEBUG oslo_concurrency.lockutils 
[None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.641s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1305.567190] env[61215]: DEBUG nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1305.616061] env[61215]: DEBUG nova.compute.utils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1305.618259] env[61215]: DEBUG nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1305.618351] env[61215]: DEBUG nova.network.neutron [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1305.629189] env[61215]: DEBUG nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1305.708395] env[61215]: DEBUG nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1305.725199] env[61215]: DEBUG nova.policy [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e525c51f054b4ee6936c066be4cd7c1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5ec8b5264984461a57598f0c7c9cc83', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1305.744802] env[61215]: DEBUG nova.virt.hardware [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1305.745076] env[61215]: DEBUG nova.virt.hardware [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1305.745268] env[61215]: DEBUG nova.virt.hardware [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1305.745472] env[61215]: DEBUG nova.virt.hardware [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1305.745621] env[61215]: DEBUG nova.virt.hardware [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1305.745989] env[61215]: DEBUG nova.virt.hardware [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1305.745989] env[61215]: 
DEBUG nova.virt.hardware [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1305.746314] env[61215]: DEBUG nova.virt.hardware [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1305.746387] env[61215]: DEBUG nova.virt.hardware [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1305.747557] env[61215]: DEBUG nova.virt.hardware [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1305.747557] env[61215]: DEBUG nova.virt.hardware [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1305.747878] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8763c3ae-c87d-4443-844f-39804776c6aa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.757557] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d177349b-720a-4b3b-922b-6fd573fa36dd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.079468] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1306.263944] env[61215]: DEBUG nova.network.neutron [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Successfully created port: 6b91c70c-b10e-4bf7-b800-dd44d4a62e20 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1306.649586] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1307.103307] env[61215]: DEBUG nova.network.neutron [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] 
Successfully updated port: 6b91c70c-b10e-4bf7-b800-dd44d4a62e20 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1307.120678] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Acquiring lock "refresh_cache-0223d7b6-12e1-4418-97f2-012ed41daa7a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1307.121752] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Acquired lock "refresh_cache-0223d7b6-12e1-4418-97f2-012ed41daa7a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.121752] env[61215]: DEBUG nova.network.neutron [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1307.131850] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Acquiring lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.135031] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.180136] env[61215]: DEBUG nova.network.neutron [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1307.444147] env[61215]: DEBUG nova.network.neutron [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Updating instance_info_cache with network_info: [{"id": "6b91c70c-b10e-4bf7-b800-dd44d4a62e20", "address": "fa:16:3e:7b:df:8d", "network": {"id": "b41ea6f8-87d3-4ecb-ad6a-cd29ff108ebf", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1993005571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ec8b5264984461a57598f0c7c9cc83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b91c70c-b1", "ovs_interfaceid": "6b91c70c-b10e-4bf7-b800-dd44d4a62e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.462475] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Releasing lock "refresh_cache-0223d7b6-12e1-4418-97f2-012ed41daa7a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1307.462831] env[61215]: DEBUG nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Instance network_info: |[{"id": "6b91c70c-b10e-4bf7-b800-dd44d4a62e20", "address": "fa:16:3e:7b:df:8d", "network": {"id": "b41ea6f8-87d3-4ecb-ad6a-cd29ff108ebf", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1993005571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ec8b5264984461a57598f0c7c9cc83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b91c70c-b1", "ovs_interfaceid": "6b91c70c-b10e-4bf7-b800-dd44d4a62e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1307.463254] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:df:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed3ffc1d-9f86-4029-857e-6cd1d383edbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b91c70c-b10e-4bf7-b800-dd44d4a62e20', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1307.471918] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Creating folder: Project (b5ec8b5264984461a57598f0c7c9cc83). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1307.475299] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b837e08-8769-4545-9289-96a16c3ea302 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.484031] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Created folder: Project (b5ec8b5264984461a57598f0c7c9cc83) in parent group-v352463. [ 1307.484279] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Creating folder: Instances. Parent ref: group-v352494. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1307.484544] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c1a11ea-42b5-43c2-a95d-421e9cceba6c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.493514] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Created folder: Instances in parent group-v352494. [ 1307.493808] env[61215]: DEBUG oslo.service.loopingcall [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1307.494058] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1307.494328] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f3dcd75-ad5d-4759-91e2-f9c4ef6740cb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.517606] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1307.517606] env[61215]: value = "task-1690290" [ 1307.517606] env[61215]: _type = "Task" [ 1307.517606] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.530130] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690290, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.654178] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1307.654395] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1307.654520] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1307.684584] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1307.684719] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1307.684854] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1307.684980] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1307.685117] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1307.685266] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1307.685394] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1307.685514] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1307.685632] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1307.685748] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1307.685867] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1308.028668] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690290, 'name': CreateVM_Task, 'duration_secs': 0.302727} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.028867] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1308.029585] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.029764] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.030101] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1308.030358] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e58e7401-dc14-4192-81c8-0cfa7f75ad6d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.035700] env[61215]: DEBUG oslo_vmware.api [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Waiting for the task: (returnval){ [ 1308.035700] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52962ad8-ae07-5991-a8d9-c193708316b0" [ 1308.035700] env[61215]: _type = "Task" [ 1308.035700] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.048384] env[61215]: DEBUG oslo_vmware.api [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52962ad8-ae07-5991-a8d9-c193708316b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.523459] env[61215]: DEBUG nova.compute.manager [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Received event network-vif-plugged-6b91c70c-b10e-4bf7-b800-dd44d4a62e20 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1308.523695] env[61215]: DEBUG oslo_concurrency.lockutils [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] Acquiring lock "0223d7b6-12e1-4418-97f2-012ed41daa7a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1308.523903] env[61215]: DEBUG oslo_concurrency.lockutils [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] Lock "0223d7b6-12e1-4418-97f2-012ed41daa7a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1308.524373] env[61215]: DEBUG oslo_concurrency.lockutils [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] Lock "0223d7b6-12e1-4418-97f2-012ed41daa7a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1308.524624] env[61215]: DEBUG nova.compute.manager [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] No waiting events found dispatching network-vif-plugged-6b91c70c-b10e-4bf7-b800-dd44d4a62e20 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1308.524808] env[61215]: WARNING nova.compute.manager [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Received unexpected event network-vif-plugged-6b91c70c-b10e-4bf7-b800-dd44d4a62e20 for instance with vm_state building and task_state spawning. [ 1308.524978] env[61215]: DEBUG nova.compute.manager [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Received event network-changed-6b91c70c-b10e-4bf7-b800-dd44d4a62e20 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1308.525195] env[61215]: DEBUG nova.compute.manager [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Refreshing instance network info cache due to event network-changed-6b91c70c-b10e-4bf7-b800-dd44d4a62e20. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1308.525387] env[61215]: DEBUG oslo_concurrency.lockutils [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] Acquiring lock "refresh_cache-0223d7b6-12e1-4418-97f2-012ed41daa7a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.525529] env[61215]: DEBUG oslo_concurrency.lockutils [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] Acquired lock "refresh_cache-0223d7b6-12e1-4418-97f2-012ed41daa7a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.525688] env[61215]: DEBUG nova.network.neutron [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Refreshing network info cache for port 6b91c70c-b10e-4bf7-b800-dd44d4a62e20 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1308.553438] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1308.553723] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1308.553937] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.657044] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1308.657261] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1309.582217] env[61215]: DEBUG nova.network.neutron [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Updated VIF entry in instance network info cache for port 6b91c70c-b10e-4bf7-b800-dd44d4a62e20. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1309.582589] env[61215]: DEBUG nova.network.neutron [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Updating instance_info_cache with network_info: [{"id": "6b91c70c-b10e-4bf7-b800-dd44d4a62e20", "address": "fa:16:3e:7b:df:8d", "network": {"id": "b41ea6f8-87d3-4ecb-ad6a-cd29ff108ebf", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1993005571-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ec8b5264984461a57598f0c7c9cc83", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed3ffc1d-9f86-4029-857e-6cd1d383edbb", "external-id": "nsx-vlan-transportzone-759", "segmentation_id": 759, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b91c70c-b1", "ovs_interfaceid": "6b91c70c-b10e-4bf7-b800-dd44d4a62e20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.805027] env[61215]: DEBUG oslo_concurrency.lockutils [req-6d69fb99-0396-4209-9706-18e005ab0a09 req-cdd6b52f-ae44-4d87-8b83-520e4ea18b76 service nova] Releasing lock "refresh_cache-0223d7b6-12e1-4418-97f2-012ed41daa7a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.805027] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1309.805027] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1309.805027] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1310.655136] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.650704] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.683372] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] 
Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1311.695327] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.695575] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.695755] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1311.695927] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1311.698337] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef6352c-ddcd-43b9-9822-b1fca7d8fa28 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.708289] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7722ba-46b0-4a59-9e8a-60c662e60e37 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.726691] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53f361a-2d15-4c7b-8edd-28b7a61a9082 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.735098] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7df6195-ccd0-4149-a221-035a1bb73e1b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.769212] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181289MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1311.769373] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.769579] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.854208] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 1eaf05ba-8235-4a68-b807-db95e65c0933 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.854373] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 351c2ada-945a-4f0b-8fa9-47e3412c5e05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.854499] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d2756d6f-d1f6-4408-83a6-3cbae8bf8b04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.854621] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 805748d7-e459-4608-a02d-05ac56c48290 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.854738] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ef0f6995-b272-4a45-a09d-5d8d38ffe23c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.854855] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 67068a42-eba7-4529-9ebf-43d6865362b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.854969] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ad40882f-de01-4bee-81dd-e91d07248d22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.855094] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 97dae204-f706-41b5-bf9f-b320d022b2f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.855225] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 82698789-4c08-453b-a973-1916d1f94af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.855347] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0223d7b6-12e1-4418-97f2-012ed41daa7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1311.889848] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.919724] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.932958] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d49f702b-cd29-4491-938c-0291b351ef20 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.945469] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 44f29d5d-46d6-433a-972b-f971a04200e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.961076] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8d807f25-01ea-42b2-b5ed-b8ff2b6c39ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.983826] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0ad2b135-a40e-4353-a524-1d66435197bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1311.997236] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 7d9fbfe1-a62e-41e2-8736-61b2f895598d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1312.015561] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 389bf40e-6d3f-4b37-a6a7-6b18a9281da7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1312.029173] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 9f4a9d38-7536-4804-9fde-0b14a18999b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1312.045715] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d99f8ee6-768f-4775-b07e-c84536e7f659 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1312.058211] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 1d14c483-5775-4eda-9173-67b02bd97889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1312.076770] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 666eca1f-edf0-445d-99f3-428547f01746 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1312.092589] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f30dda39-422f-433d-9684-f2c7486271fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1312.108122] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49d99eb4-905c-409c-97e2-001801f61b38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1312.121880] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 1e53c769-1b6e-4e9b-805d-9ef8d8db4813 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1312.139561] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 846e6d6a-dc09-4b7e-81d2-3d2023d945c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1312.155658] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 1678b6d3-d4ef-4497-a240-b43a4837d9d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1312.168178] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02249c8d-c2b8-4e58-87eb-aecab70177bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1312.179632] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1312.181152] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1312.181152] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1312.668025] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85418d1d-9b4d-4384-a8dd-9c87cb071e2a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.677399] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f31b1c9-b704-4d26-97fa-7e7838395fd2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.709576] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e10a053-3390-47ff-a4a4-3c6c7d43e5ad {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.717286] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0d25d7-3ff2-4a0b-8d30-ad3ba6cc4e49 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.731878] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1312.741625] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1312.764556] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1312.764791] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.995s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1314.405784] env[61215]: DEBUG oslo_concurrency.lockutils [None 
req-23c3cc1d-b542-474d-b396-9df22068a017 tempest-ServersTestFqdnHostnames-1487308690 tempest-ServersTestFqdnHostnames-1487308690-project-member] Acquiring lock "6bf85a18-78f3-4471-bdc6-b600f90e1700" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1314.406374] env[61215]: DEBUG oslo_concurrency.lockutils [None req-23c3cc1d-b542-474d-b396-9df22068a017 tempest-ServersTestFqdnHostnames-1487308690 tempest-ServersTestFqdnHostnames-1487308690-project-member] Lock "6bf85a18-78f3-4471-bdc6-b600f90e1700" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1319.168946] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8191627f-da3b-488d-a377-a14e8ee3ea5a tempest-ServerMetadataNegativeTestJSON-232383651 tempest-ServerMetadataNegativeTestJSON-232383651-project-member] Acquiring lock "11f8cfbd-7fdd-4a5f-9fde-477caa043b0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1319.169552] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8191627f-da3b-488d-a377-a14e8ee3ea5a tempest-ServerMetadataNegativeTestJSON-232383651 tempest-ServerMetadataNegativeTestJSON-232383651-project-member] Lock "11f8cfbd-7fdd-4a5f-9fde-477caa043b0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.048914] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3931e79d-7ea1-4737-869b-3fcba0a35f40 tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] Acquiring lock "01421743-cbfc-40d9-95aa-6b26422581e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.048914] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3931e79d-7ea1-4737-869b-3fcba0a35f40 tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] Lock "01421743-cbfc-40d9-95aa-6b26422581e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.353873] env[61215]: DEBUG oslo_concurrency.lockutils [None req-55302737-dc78-4568-b2d5-66ed6de1dc6b tempest-ServerDiagnosticsNegativeTest-473520563 tempest-ServerDiagnosticsNegativeTest-473520563-project-member] Acquiring lock "c2d2f172-9d88-4b88-8cf7-bbce01619c73" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.354159] env[61215]: DEBUG oslo_concurrency.lockutils [None req-55302737-dc78-4568-b2d5-66ed6de1dc6b tempest-ServerDiagnosticsNegativeTest-473520563 tempest-ServerDiagnosticsNegativeTest-473520563-project-member] Lock 
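"c2d2f172-9d88-4b88-8cf7-bbce01619c73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}

The Acquiring/acquired/released records above and below come from oslo.concurrency's lockutils: each build serializes on an in-process lock named after the instance UUID. A minimal sketch of that pattern, assuming only that oslo.concurrency is installed (`guarded_build` is a hypothetical stand-in for the locked build step):

```python
from oslo_concurrency import lockutils

def guarded_build(instance_uuid):
    # Waiters queue here; the logged "waited N s" / "held N s" figures
    # measure time spent entering and then holding this context manager.
    with lockutils.lock(instance_uuid):
        pass  # the actual build-and-run body would execute here
```

The same mechanism, with the shared name "compute_resources", guards the resource tracker updates seen earlier in this section.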
"c2d2f172-9d88-4b88-8cf7-bbce01619c73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1341.425864] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8a4874dd-af8c-4bbc-b0ec-cbacc9bd0ecf tempest-ServersTestBootFromVolume-1998329874 tempest-ServersTestBootFromVolume-1998329874-project-member] Acquiring lock "7f0ff2e7-30e3-425b-beff-061ba242981a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1341.426237] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8a4874dd-af8c-4bbc-b0ec-cbacc9bd0ecf tempest-ServersTestBootFromVolume-1998329874 tempest-ServersTestBootFromVolume-1998329874-project-member] Lock "7f0ff2e7-30e3-425b-beff-061ba242981a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.047209] env[61215]: WARNING oslo_vmware.rw_handles [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1349.047209] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1349.047209] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1349.047209] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1349.047209] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1349.047209] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 1349.047209] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1349.047209] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1349.047209] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1349.047209] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1349.047209] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1349.047209] env[61215]: ERROR oslo_vmware.rw_handles [ 1349.047752] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/e7eb003b-c2d4-4ede-b4b7-8b357643302b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1349.049319] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Caching image 
{{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1349.049575] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Copying Virtual Disk [datastore1] vmware_temp/e7eb003b-c2d4-4ede-b4b7-8b357643302b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/e7eb003b-c2d4-4ede-b4b7-8b357643302b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1349.049865] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b4d7ffd-040a-4040-99e6-93cd21d5947d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.059345] env[61215]: DEBUG oslo_vmware.api [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Waiting for the task: (returnval){ [ 1349.059345] env[61215]: value = "task-1690302" [ 1349.059345] env[61215]: _type = "Task" [ 1349.059345] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.067932] env[61215]: DEBUG oslo_vmware.api [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Task: {'id': task-1690302, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.571464] env[61215]: DEBUG oslo_vmware.exceptions [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Fault InvalidArgument not matched. 
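{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}

"Fault InvalidArgument not matched" means oslo.vmware found no specific exception class registered for that fault name, so the task error surfaces as the generic VimFaultException seen in the traceback below. A hedged sketch of the wait pattern in these records, assuming `session` is an established oslo_vmware.api.VMwareAPISession and `copy_task` is a task reference such as task-1690302:

```python
from oslo_vmware import exceptions as vexc

def wait_for_copy(session, copy_task):
    """Wait on a vCenter task reference until it finishes."""
    try:
        # Polls the task server-side (the "progress is 0%" records) and
        # translates a failed task's error into a Python exception.
        return session.wait_for_task(copy_task)
    except vexc.VimFaultException as e:
        # Faults without a dedicated exception class (here InvalidArgument)
        # arrive as this generic type; e.fault_list names the faults.
        raise
```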
[ 1349.571464] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1349.571464] env[61215]: ERROR nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1349.571464] env[61215]: Faults: ['InvalidArgument']
[ 1349.571464] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Traceback (most recent call last):
[ 1349.571464] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 1349.571464] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] yield resources
[ 1349.571464] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1349.571464] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] self.driver.spawn(context, instance, image_meta,
[ 1349.571800] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1349.571800] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1349.571800] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1349.571800] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] self._fetch_image_if_missing(context, vi)
[ 1349.571800] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1349.571800] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] image_cache(vi, tmp_image_ds_loc)
[ 1349.571800] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1349.571800] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] vm_util.copy_virtual_disk(
[ 1349.571800] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1349.571800] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] session._wait_for_task(vmdk_copy_task)
[ 1349.571800] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1349.571800] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] return self.wait_for_task(task_ref)
[ 1349.571800] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1349.572103] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] return evt.wait()
[ 1349.572103] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1349.572103] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] result = hub.switch()
[ 1349.572103] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1349.572103] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] return self.greenlet.switch()
[ 1349.572103] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1349.572103] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] self.f(*self.args, **self.kw)
[ 1349.572103] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1349.572103] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] raise exceptions.translate_fault(task_info.error)
[ 1349.572103] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1349.572103] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Faults: ['InvalidArgument']
[ 1349.572103] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933]
[ 1349.572374] env[61215]: INFO nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Terminating instance
[ 1349.573211] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1349.573481] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1349.573771] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with
opID=oslo.vmware-6046cfcc-708a-4453-a3ec-b32011bb61fc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.576052] env[61215]: DEBUG nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1349.576300] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1349.577103] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b3fd7b-c555-415a-a8f9-300130e2fa50 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.584577] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1349.584852] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c9f3373-ddad-47aa-b94a-2074f712caab {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.587172] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1349.587409] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1349.588413] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55621a84-78c2-4a69-b8a4-6b0f69f07ba9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.593454] env[61215]: DEBUG oslo_vmware.api [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Waiting for the task: (returnval){ [ 1349.593454] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5293b6c5-030d-db59-a8d5-99c1f3669132" [ 1349.593454] env[61215]: _type = "Task" [ 1349.593454] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.607507] env[61215]: DEBUG oslo_vmware.api [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5293b6c5-030d-db59-a8d5-99c1f3669132, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.671214] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1349.671457] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1349.671641] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Deleting the datastore file [datastore1] 1eaf05ba-8235-4a68-b807-db95e65c0933 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1349.671900] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c41b0453-d5f1-4f7b-9ff0-46098f47ac77 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.686738] env[61215]: DEBUG oslo_vmware.api [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Waiting for the task: (returnval){ [ 1349.686738] env[61215]: value = "task-1690304" [ 1349.686738] env[61215]: _type = "Task" [ 1349.686738] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.695489] env[61215]: DEBUG oslo_vmware.api [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Task: {'id': task-1690304, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.104867] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1350.105185] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Creating directory with path [datastore1] vmware_temp/97b603c0-ff2d-42d3-853a-fcdf78342341/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1350.105470] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c942adf-1401-471d-add4-be2e88914bf0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.116429] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Created directory with path [datastore1] vmware_temp/97b603c0-ff2d-42d3-853a-fcdf78342341/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1350.116640] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Fetch image to [datastore1] vmware_temp/97b603c0-ff2d-42d3-853a-fcdf78342341/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1350.116809] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/97b603c0-ff2d-42d3-853a-fcdf78342341/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1350.117556] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faafc7e6-86cb-4a4f-bd0f-3203854c344b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.124102] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b7217b-83c7-4530-ae5b-1ba5570ad979 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.134053] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7735ce-77e8-40fa-bbcd-32ec5f1870b3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.165710] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5cfa7430-2655-4bb5-bc35-56ec453d6595 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.173614] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ca00b1f8-f02f-42b4-af98-9eae4e878db3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.195285] env[61215]: DEBUG oslo_vmware.api [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Task: {'id': task-1690304, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065041} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.195532] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1350.195748] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1350.195930] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1350.196122] env[61215]: INFO nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Took 0.62 seconds to destroy the instance on the hypervisor. 
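The fetch and copy records above all operate on datastore-relative paths under vmware_temp/<random uuid>/<image id>/: the Glance image data is first streamed to tmp-sparse.vmdk, then copied to a flat <image id>.vmdk alongside it. A small sketch of that layout (illustrative only; `temp_fetch_paths` is not a Nova helper):

```python
import posixpath
import uuid

def temp_fetch_paths(image_id="e91f0c25-9ff9-4937-8440-f47cfb2028bc"):
    # Datastore paths are POSIX-style and relative to the datastore root,
    # e.g. "[datastore1] vmware_temp/<uuid>/<image-id>/tmp-sparse.vmdk".
    base = posixpath.join("vmware_temp", str(uuid.uuid4()), image_id)
    sparse_target = posixpath.join(base, "tmp-sparse.vmdk")  # download target
    flat_copy = posixpath.join(base, image_id + ".vmdk")     # CopyVirtualDisk target
    return sparse_target, flat_copy
```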
[ 1350.198338] env[61215]: DEBUG nova.compute.claims [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1350.198509] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.198718] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1350.260659] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1350.312065] env[61215]: DEBUG oslo_vmware.rw_handles [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/97b603c0-ff2d-42d3-853a-fcdf78342341/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1350.373041] env[61215]: DEBUG oslo_vmware.rw_handles [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1350.373260] env[61215]: DEBUG oslo_vmware.rw_handles [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/97b603c0-ff2d-42d3-853a-fcdf78342341/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
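{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}

The write handle streams the image straight to the host's /folder endpoint; the RemoteDisconnected warning earlier (at 1349.047209) came from reading the response while closing such a handle. A bare http.client sketch of that flow, where HOST and PATH are stand-ins for the URL in the record above:

```python
import http.client

HOST = "esx7c1n1.openstack.eu-de-1.cloud.sap"
PATH = "/folder/vmware_temp/.../tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1"

def upload(data: bytes):
    # Announce the payload size up front, stream the bytes, then read the
    # response on close, which is where RemoteDisconnected can surface.
    conn = http.client.HTTPSConnection(HOST, 443)
    conn.putrequest("PUT", PATH)
    conn.putheader("Content-Length", str(len(data)))
    conn.endheaders()
    conn.send(data)
    try:
        return conn.getresponse()
    except http.client.RemoteDisconnected:
        # The server may close the socket without a response once the
        # transfer completes; oslo.vmware logs this as a WARNING and
        # carries on, as seen above.
        return None
    finally:
        conn.close()
```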
[ 1350.885951] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de36050-e1c8-4a2d-afb7-9ea608a77e21 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1350.894408] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b0b5e9-ef85-456d-8dc7-aad649c97522 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1350.925442] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7796d85f-e8f3-4480-b00d-21be87c75e07 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1350.932540] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f331da-da19-4864-906c-257d157d65a0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1350.945428] env[61215]: DEBUG nova.compute.provider_tree [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1350.958747] env[61215]: DEBUG nova.scheduler.client.report [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1350.979313] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.780s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1350.979806] env[61215]: ERROR nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1350.979806] env[61215]: Faults: ['InvalidArgument']
[ 1350.979806] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Traceback (most recent call last):
[ 1350.979806] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1350.979806] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] self.driver.spawn(context, instance, image_meta,
[ 1350.979806] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1350.979806] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1350.979806] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1350.979806] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] self._fetch_image_if_missing(context, vi)
[ 1350.979806] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1350.979806] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] image_cache(vi, tmp_image_ds_loc)
[ 1350.979806] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1350.980215] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] vm_util.copy_virtual_disk(
[ 1350.980215] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1350.980215] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] session._wait_for_task(vmdk_copy_task)
[ 1350.980215] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1350.980215] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] return self.wait_for_task(task_ref)
[ 1350.980215] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1350.980215] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] return evt.wait()
[ 1350.980215] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1350.980215] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] result = hub.switch()
[ 1350.980215] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1350.980215] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] return self.greenlet.switch()
[ 1350.980215] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1350.980215] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] self.f(*self.args, **self.kw)
[ 1350.980540] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1350.980540] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] raise exceptions.translate_fault(task_info.error)
[ 1350.980540] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1350.980540] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Faults: ['InvalidArgument']
[ 1350.980540] env[61215]: ERROR nova.compute.manager [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933]
[ 1350.980540] env[61215]: DEBUG nova.compute.utils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1350.982051] env[61215]: DEBUG nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Build of instance 1eaf05ba-8235-4a68-b807-db95e65c0933 was re-scheduled: A specified parameter was not correct: fileType
[ 1350.982051] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1350.982433] env[61215]: DEBUG nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1350.982611] env[61215]: DEBUG nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged.
[ 1350.982785] env[61215]: DEBUG nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}}
[ 1350.982950] env[61215]: DEBUG nova.network.neutron [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1351.381100] env[61215]: DEBUG nova.network.neutron [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1351.392702] env[61215]: INFO nova.compute.manager [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] [instance: 1eaf05ba-8235-4a68-b807-db95e65c0933] Took 0.41 seconds to deallocate network for instance.
[ 1351.542912] env[61215]: INFO nova.scheduler.client.report [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Deleted allocations for instance 1eaf05ba-8235-4a68-b807-db95e65c0933
[ 1351.574844] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0e4760ad-f511-4d4a-a4a6-3f61c2074bcc tempest-ServerAddressesTestJSON-1885845242 tempest-ServerAddressesTestJSON-1885845242-project-member] Lock "1eaf05ba-8235-4a68-b807-db95e65c0933" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.168s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1351.598510] env[61215]: DEBUG nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Starting instance...
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1351.652644] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1351.652905] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.654429] env[61215]: INFO nova.compute.claims [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1352.104723] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48a8c5d-42c5-4625-9be9-fee60944d117 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.112260] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b519c3f-f854-42aa-9a0c-bd926efd410f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.142861] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8a6d18-aa16-40b8-b70e-24881df7effa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.151469] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a1d195-6e0a-4e29-8e7d-2443bf0b4883 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.165217] env[61215]: DEBUG nova.compute.provider_tree [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1352.174921] env[61215]: DEBUG nova.scheduler.client.report [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1352.192314] env[61215]: DEBUG 
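oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.539s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}

The inventory data repeated in the records above bounds what claims like this can succeed: placement treats the capacity of each resource class as (total - reserved) * allocation_ratio. A worked check against the logged inventory (illustrative code, not a placement API):

```python
# Capacity implied by the inventory data in the log above.
INVENTORY = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

def capacity(inventory):
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inventory.items()}

print(capacity(INVENTORY))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```

That effective headroom (for example 192 schedulable VCPUs against the 10 currently allocated) is consistent with the claim for 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d succeeding here.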
[ 1352.192801] env[61215]: DEBUG nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1352.227026] env[61215]: DEBUG nova.compute.utils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1352.228075] env[61215]: DEBUG nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1352.228371] env[61215]: DEBUG nova.network.neutron [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1352.237058] env[61215]: DEBUG nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1352.302035] env[61215]: DEBUG nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Start spawning the instance on the hypervisor.
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1352.322992] env[61215]: DEBUG nova.policy [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c903ec26067349b4b04807d180549f6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd2a19fe024884a109a7f5818e49c1d98', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1352.333967] env[61215]: DEBUG nova.virt.hardware [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1352.333967] env[61215]: DEBUG nova.virt.hardware [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1352.333967] env[61215]: DEBUG nova.virt.hardware [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1352.334387] env[61215]: DEBUG nova.virt.hardware [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1352.334387] env[61215]: DEBUG nova.virt.hardware [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1352.334387] env[61215]: DEBUG nova.virt.hardware [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1352.334830] 
env[61215]: DEBUG nova.virt.hardware [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1352.334830] env[61215]: DEBUG nova.virt.hardware [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1352.334936] env[61215]: DEBUG nova.virt.hardware [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1352.335052] env[61215]: DEBUG nova.virt.hardware [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1352.335231] env[61215]: DEBUG nova.virt.hardware [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1352.336387] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49776067-c8fd-4fee-bbd8-9905e3051814 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.344478] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bffffe3-2956-4b61-bed1-6b15a8dd47da {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.737631] env[61215]: DEBUG nova.network.neutron [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Successfully created port: d3c842e5-4498-4473-aec9-fe8f2b53a871 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1353.727616] env[61215]: DEBUG nova.network.neutron [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Successfully updated port: d3c842e5-4498-4473-aec9-fe8f2b53a871 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1353.739121] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquiring lock "refresh_cache-85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.739362] env[61215]: DEBUG oslo_concurrency.lockutils 
[None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquired lock "refresh_cache-85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.739696] env[61215]: DEBUG nova.network.neutron [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1353.830263] env[61215]: DEBUG nova.network.neutron [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1353.926262] env[61215]: DEBUG nova.compute.manager [req-6671ff9d-5fa4-4625-b3d7-e349892066c1 req-32b382b2-7c6b-490c-9e8b-7a9ac4cd7f5a service nova] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Received event network-vif-plugged-d3c842e5-4498-4473-aec9-fe8f2b53a871 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1353.926262] env[61215]: DEBUG oslo_concurrency.lockutils [req-6671ff9d-5fa4-4625-b3d7-e349892066c1 req-32b382b2-7c6b-490c-9e8b-7a9ac4cd7f5a service nova] Acquiring lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.926262] env[61215]: DEBUG oslo_concurrency.lockutils [req-6671ff9d-5fa4-4625-b3d7-e349892066c1 req-32b382b2-7c6b-490c-9e8b-7a9ac4cd7f5a service nova] Lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.926262] env[61215]: DEBUG oslo_concurrency.lockutils [req-6671ff9d-5fa4-4625-b3d7-e349892066c1 req-32b382b2-7c6b-490c-9e8b-7a9ac4cd7f5a service nova] Lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.926526] env[61215]: DEBUG nova.compute.manager [req-6671ff9d-5fa4-4625-b3d7-e349892066c1 req-32b382b2-7c6b-490c-9e8b-7a9ac4cd7f5a service nova] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] No waiting events found dispatching network-vif-plugged-d3c842e5-4498-4473-aec9-fe8f2b53a871 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1353.927390] env[61215]: WARNING nova.compute.manager [req-6671ff9d-5fa4-4625-b3d7-e349892066c1 req-32b382b2-7c6b-490c-9e8b-7a9ac4cd7f5a service nova] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Received unexpected event network-vif-plugged-d3c842e5-4498-4473-aec9-fe8f2b53a871 for instance with vm_state building and task_state spawning. 
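(The nova.virt.hardware DEBUG walk above, for the 1-vCPU m1.nano flavor with no flavor or image topology limits, enumerates candidate CPU topologies and finds exactly one: sockets=1, cores=1, threads=1. Below is a minimal stdlib-only sketch of that enumeration; it illustrates the logged walk, not Nova's actual _get_possible_cpu_topologies implementation, and VirtCPUTopology here is a stand-in namedtuple, not the real Nova object.)

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, maximum):
        # Yield every (sockets, cores, threads) split whose product equals
        # vcpus, bounded by the per-dimension maxima the log reports
        # (65536 each when neither flavor nor image constrains topology).
        for sockets in range(1, min(vcpus, maximum.sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, maximum.cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= maximum.threads:
                    yield VirtCPUTopology(sockets, cores, threads)

    maximum = VirtCPUTopology(sockets=65536, cores=65536, threads=65536)
    print(list(possible_topologies(1, maximum)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- a single candidate,
    # matching "Got 1 possible topologies" in the records above.
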
[ 1354.347866] env[61215]: DEBUG nova.network.neutron [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Updating instance_info_cache with network_info: [{"id": "d3c842e5-4498-4473-aec9-fe8f2b53a871", "address": "fa:16:3e:b4:89:83", "network": {"id": "40356ccf-d799-4505-9881-8cd0d08aa42a", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1409087454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2a19fe024884a109a7f5818e49c1d98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cb478a6-872c-4a90-a8db-526b374e82ce", "external-id": "nsx-vlan-transportzone-835", "segmentation_id": 835, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3c842e5-44", "ovs_interfaceid": "d3c842e5-4498-4473-aec9-fe8f2b53a871", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.366606] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Releasing lock "refresh_cache-85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.367053] env[61215]: DEBUG nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Instance network_info: |[{"id": "d3c842e5-4498-4473-aec9-fe8f2b53a871", "address": "fa:16:3e:b4:89:83", "network": {"id": "40356ccf-d799-4505-9881-8cd0d08aa42a", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1409087454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2a19fe024884a109a7f5818e49c1d98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cb478a6-872c-4a90-a8db-526b374e82ce", "external-id": "nsx-vlan-transportzone-835", "segmentation_id": 835, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3c842e5-44", "ovs_interfaceid": "d3c842e5-4498-4473-aec9-fe8f2b53a871", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1354.367403] env[61215]: DEBUG 
nova.virt.vmwareapi.vmops [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:89:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8cb478a6-872c-4a90-a8db-526b374e82ce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3c842e5-4498-4473-aec9-fe8f2b53a871', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1354.375993] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Creating folder: Project (d2a19fe024884a109a7f5818e49c1d98). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1354.376687] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7751bbe5-6b66-405e-a072-1f8d11e8c1c8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.389305] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Created folder: Project (d2a19fe024884a109a7f5818e49c1d98) in parent group-v352463. [ 1354.389498] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Creating folder: Instances. Parent ref: group-v352501. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1354.389724] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64670a09-c0e9-44fd-b06a-ae38ebfff518 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.398798] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Created folder: Instances in parent group-v352501. [ 1354.399047] env[61215]: DEBUG oslo.service.loopingcall [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1354.399241] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1354.399444] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e650ef3-1c4b-46dc-a3ea-fdee68b72fcc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.419167] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1354.419167] env[61215]: value = "task-1690307" [ 1354.419167] env[61215]: _type = "Task" [ 1354.419167] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.426983] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690307, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.934600] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690307, 'name': CreateVM_Task} progress is 25%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.432828] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690307, 'name': CreateVM_Task} progress is 25%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.930430] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690307, 'name': CreateVM_Task} progress is 25%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.993347] env[61215]: DEBUG nova.compute.manager [req-61c804ba-4443-444b-a2e9-6b6ad7bce80d req-6619fabc-96e7-4c2a-bbda-6f90d0205cce service nova] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Received event network-changed-d3c842e5-4498-4473-aec9-fe8f2b53a871 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1355.993347] env[61215]: DEBUG nova.compute.manager [req-61c804ba-4443-444b-a2e9-6b6ad7bce80d req-6619fabc-96e7-4c2a-bbda-6f90d0205cce service nova] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Refreshing instance network info cache due to event network-changed-d3c842e5-4498-4473-aec9-fe8f2b53a871. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1355.993788] env[61215]: DEBUG oslo_concurrency.lockutils [req-61c804ba-4443-444b-a2e9-6b6ad7bce80d req-6619fabc-96e7-4c2a-bbda-6f90d0205cce service nova] Acquiring lock "refresh_cache-85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1355.993788] env[61215]: DEBUG oslo_concurrency.lockutils [req-61c804ba-4443-444b-a2e9-6b6ad7bce80d req-6619fabc-96e7-4c2a-bbda-6f90d0205cce service nova] Acquired lock "refresh_cache-85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.993933] env[61215]: DEBUG nova.network.neutron [req-61c804ba-4443-444b-a2e9-6b6ad7bce80d req-6619fabc-96e7-4c2a-bbda-6f90d0205cce service nova] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Refreshing network info cache for port d3c842e5-4498-4473-aec9-fe8f2b53a871 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1356.437352] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690307, 'name': CreateVM_Task, 'duration_secs': 1.751467} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.437352] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1356.437352] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1356.437352] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1356.437352] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1356.437493] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0de7dd4e-1194-433a-9c4e-1c200dbecaa1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.445643] env[61215]: DEBUG oslo_vmware.api [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Waiting for the task: (returnval){ [ 1356.445643] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5233e1c2-ae73-0286-4952-f74578f6027e" [ 1356.445643] env[61215]: _type = "Task" [ 1356.445643] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.454631] env[61215]: DEBUG oslo_vmware.api [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5233e1c2-ae73-0286-4952-f74578f6027e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.464628] env[61215]: DEBUG nova.network.neutron [req-61c804ba-4443-444b-a2e9-6b6ad7bce80d req-6619fabc-96e7-4c2a-bbda-6f90d0205cce service nova] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Updated VIF entry in instance network info cache for port d3c842e5-4498-4473-aec9-fe8f2b53a871. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1356.465089] env[61215]: DEBUG nova.network.neutron [req-61c804ba-4443-444b-a2e9-6b6ad7bce80d req-6619fabc-96e7-4c2a-bbda-6f90d0205cce service nova] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Updating instance_info_cache with network_info: [{"id": "d3c842e5-4498-4473-aec9-fe8f2b53a871", "address": "fa:16:3e:b4:89:83", "network": {"id": "40356ccf-d799-4505-9881-8cd0d08aa42a", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1409087454-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2a19fe024884a109a7f5818e49c1d98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8cb478a6-872c-4a90-a8db-526b374e82ce", "external-id": "nsx-vlan-transportzone-835", "segmentation_id": 835, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3c842e5-44", "ovs_interfaceid": "d3c842e5-4498-4473-aec9-fe8f2b53a871", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1356.475030] env[61215]: DEBUG oslo_concurrency.lockutils [req-61c804ba-4443-444b-a2e9-6b6ad7bce80d req-6619fabc-96e7-4c2a-bbda-6f90d0205cce service nova] Releasing lock "refresh_cache-85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1356.958528] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1356.958807] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1356.959092] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.764239] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.764516] env[61215]: DEBUG 
oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.654925] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.654925] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1369.654925] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1369.676126] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1369.676302] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1369.676439] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1369.676567] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1369.676691] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1369.676825] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1369.676950] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1369.677250] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1369.677409] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1369.677724] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1369.677724] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1369.678158] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.678407] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.678542] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.678672] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1370.654617] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1371.653566] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1371.653834] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1371.667332] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.667549] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.667713] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.667866] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1371.668982] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe1f37d-9cb5-4f3f-950f-8da4d866e53e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.679042] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-351c5f5c-a6b6-454d-bec4-6cee2e15cae8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.692972] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dceebf7-8254-45c9-98d2-a56b33eec773 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.699413] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523a5659-ae62-4beb-aff3-a587c0fae2e3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.728203] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181256MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1371.728291] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.728489] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.808219] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 351c2ada-945a-4f0b-8fa9-47e3412c5e05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.808219] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d2756d6f-d1f6-4408-83a6-3cbae8bf8b04 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.808219] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 805748d7-e459-4608-a02d-05ac56c48290 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.808219] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ef0f6995-b272-4a45-a09d-5d8d38ffe23c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.808451] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 67068a42-eba7-4529-9ebf-43d6865362b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.808451] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ad40882f-de01-4bee-81dd-e91d07248d22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.808451] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 97dae204-f706-41b5-bf9f-b320d022b2f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.808451] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 82698789-4c08-453b-a973-1916d1f94af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.808556] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0223d7b6-12e1-4418-97f2-012ed41daa7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.808556] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1371.819545] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.835526] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d49f702b-cd29-4491-938c-0291b351ef20 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.849564] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 44f29d5d-46d6-433a-972b-f971a04200e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.860320] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8d807f25-01ea-42b2-b5ed-b8ff2b6c39ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.870905] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0ad2b135-a40e-4353-a524-1d66435197bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.882592] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 7d9fbfe1-a62e-41e2-8736-61b2f895598d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.891905] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 389bf40e-6d3f-4b37-a6a7-6b18a9281da7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.901398] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 9f4a9d38-7536-4804-9fde-0b14a18999b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.910564] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d99f8ee6-768f-4775-b07e-c84536e7f659 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.920731] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 1d14c483-5775-4eda-9173-67b02bd97889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.930823] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 666eca1f-edf0-445d-99f3-428547f01746 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.943310] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f30dda39-422f-433d-9684-f2c7486271fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.954904] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49d99eb4-905c-409c-97e2-001801f61b38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.965689] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 1e53c769-1b6e-4e9b-805d-9ef8d8db4813 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.976245] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 846e6d6a-dc09-4b7e-81d2-3d2023d945c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.986838] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 1678b6d3-d4ef-4497-a240-b43a4837d9d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1371.997532] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02249c8d-c2b8-4e58-87eb-aecab70177bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1372.008762] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1372.019987] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 6bf85a18-78f3-4471-bdc6-b600f90e1700 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1372.030933] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 11f8cfbd-7fdd-4a5f-9fde-477caa043b0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1372.041892] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 01421743-cbfc-40d9-95aa-6b26422581e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1372.052744] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c2d2f172-9d88-4b88-8cf7-bbce01619c73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1372.065037] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 7f0ff2e7-30e3-425b-beff-061ba242981a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1372.065399] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1372.065652] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1372.516023] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88562303-5239-47eb-ba35-780a3f37fe06 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.524149] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ff040a-8161-4a07-86c6-d87d9a2f7a5b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.555689] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91452999-8bbf-41fc-b58f-0ebb5ce306c3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.563316] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103c682a-5491-41ae-8cab-94aaf75e139c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.578784] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1372.587109] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1372.601565] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1372.601757] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.873s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.263779] env[61215]: WARNING oslo_vmware.rw_handles [None 
req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1399.263779] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1399.263779] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1399.263779] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1399.263779] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1399.263779] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 1399.263779] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1399.263779] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1399.263779] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1399.263779] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1399.263779] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1399.263779] env[61215]: ERROR oslo_vmware.rw_handles [ 1399.264462] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/97b603c0-ff2d-42d3-853a-fcdf78342341/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1399.265887] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1399.266199] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Copying Virtual Disk [datastore1] vmware_temp/97b603c0-ff2d-42d3-853a-fcdf78342341/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/97b603c0-ff2d-42d3-853a-fcdf78342341/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1399.266509] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7aa538b3-91a2-4030-9b91-0c389b7cb95e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.275031] env[61215]: DEBUG oslo_vmware.api [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Waiting for the task: (returnval){ [ 1399.275031] env[61215]: value = "task-1690308" [ 1399.275031] env[61215]: _type = "Task" [ 
1399.275031] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.282710] env[61215]: DEBUG oslo_vmware.api [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Task: {'id': task-1690308, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.785682] env[61215]: DEBUG oslo_vmware.exceptions [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1399.785990] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.786608] env[61215]: ERROR nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1399.786608] env[61215]: Faults: ['InvalidArgument'] [ 1399.786608] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Traceback (most recent call last): [ 1399.786608] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1399.786608] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] yield resources [ 1399.786608] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1399.786608] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] self.driver.spawn(context, instance, image_meta, [ 1399.786608] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1399.786608] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1399.786608] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1399.786608] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] self._fetch_image_if_missing(context, vi) [ 1399.786608] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1399.786996] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] image_cache(vi, tmp_image_ds_loc) [ 1399.786996] env[61215]: 
ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1399.786996] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] vm_util.copy_virtual_disk( [ 1399.786996] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1399.786996] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] session._wait_for_task(vmdk_copy_task) [ 1399.786996] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1399.786996] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] return self.wait_for_task(task_ref) [ 1399.786996] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1399.786996] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] return evt.wait() [ 1399.786996] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1399.786996] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] result = hub.switch() [ 1399.786996] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1399.786996] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] return self.greenlet.switch() [ 1399.787379] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1399.787379] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] self.f(*self.args, **self.kw) [ 1399.787379] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1399.787379] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] raise exceptions.translate_fault(task_info.error) [ 1399.787379] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1399.787379] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Faults: ['InvalidArgument'] [ 1399.787379] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] [ 1399.787379] env[61215]: INFO nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Terminating instance [ 1399.789094] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Acquired 
lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.789094] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1399.789239] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8d27873-7326-48a3-9e3a-2dadd6f5772d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.791544] env[61215]: DEBUG nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1399.791736] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1399.792498] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9400d0e-badc-401a-8951-b291414005a1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.800206] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1399.801313] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c74883c-b18e-48a1-8a52-1f21f2d7fb55 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.803029] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1399.803029] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1399.803659] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42019b63-a59c-443f-a70f-11e99b6fc0b4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.808626] env[61215]: DEBUG oslo_vmware.api [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Waiting for the task: (returnval){ [ 1399.808626] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5297d8bf-7b5f-fba7-0e5c-7c54dba22659" [ 1399.808626] env[61215]: _type = "Task" [ 1399.808626] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.816626] env[61215]: DEBUG oslo_vmware.api [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5297d8bf-7b5f-fba7-0e5c-7c54dba22659, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.883717] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1399.883936] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1399.884184] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Deleting the datastore file [datastore1] d2756d6f-d1f6-4408-83a6-3cbae8bf8b04 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1399.884932] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-36eb4e2d-8a88-4a2f-a694-775d4251717f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.891710] env[61215]: DEBUG oslo_vmware.api [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Waiting for the task: (returnval){ [ 1399.891710] env[61215]: value = "task-1690310" [ 1399.891710] env[61215]: _type = "Task" [ 1399.891710] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.899379] env[61215]: DEBUG oslo_vmware.api [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Task: {'id': task-1690310, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.319126] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1400.319389] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Creating directory with path [datastore1] vmware_temp/ab629826-0647-4610-92d7-1c81a8181cdc/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1400.319624] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc8cc997-6d55-49d9-a90a-33957c1273a3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.330643] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Created directory with path [datastore1] vmware_temp/ab629826-0647-4610-92d7-1c81a8181cdc/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1400.330838] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Fetch image to [datastore1] vmware_temp/ab629826-0647-4610-92d7-1c81a8181cdc/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1400.331020] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/ab629826-0647-4610-92d7-1c81a8181cdc/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1400.331783] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ec2cd5-e3a5-408d-ab18-93a001be0856 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.338266] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b146600-e1f4-42c9-8975-1fb90680a818 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.347231] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3e1ed75-2b24-4a70-954c-3620b091e7df {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.379085] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83c4be4-1c8e-4bf0-96f5-313d36335322 
{{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.384496] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-14da679b-07f9-43f6-9f77-11fc2dd3415c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.399438] env[61215]: DEBUG oslo_vmware.api [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Task: {'id': task-1690310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079054} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.399665] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1400.399853] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1400.400041] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1400.400223] env[61215]: INFO nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Took 0.61 seconds to destroy the instance on the hypervisor. 
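The sequence above shows oslo.vmware's poll-and-translate loop from the outside: CopyVirtualDisk_Task (task-1690308) is invoked, wait_for_task polls it, and when the task ends in error the TaskInfo fault is translated into a VimFaultException carrying Faults: ['InvalidArgument'] ("A specified parameter was not correct: fileType"), which then unwinds through _cache_sparse_image into the spawn path. A minimal illustrative sketch of that loop, assuming TaskInfo is modeled as a plain dict and translate_fault as a local helper (this is not the oslo.vmware source):

import time

class VimFaultException(Exception):
    """Carries the fault names (e.g. ['InvalidArgument']) plus the message."""
    def __init__(self, fault_list, msg):
        super().__init__(msg)
        self.fault_list = fault_list

def translate_fault(error):
    # Stand-in for oslo_vmware.exceptions.translate_fault(): map the task's
    # error structure to a matching exception class (here, a single class).
    return VimFaultException(error["faults"], error["localizedMessage"])

def wait_for_task(poll_task_info, interval=0.5):
    """Poll a vSphere task until it leaves the running state."""
    while True:
        info = poll_task_info()          # one poll per "progress is N%" line
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise translate_fault(info["error"])
        time.sleep(interval)

A failed copy then surfaces exactly as logged: VimFaultException("A specified parameter was not correct: fileType") with fault_list ['InvalidArgument'], raised out of the _poll_task callback into session._wait_for_task.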
[ 1400.402283] env[61215]: DEBUG nova.compute.claims [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1400.402462] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.402682] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.475670] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1400.532053] env[61215]: DEBUG oslo_vmware.rw_handles [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ab629826-0647-4610-92d7-1c81a8181cdc/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1400.596940] env[61215]: DEBUG oslo_vmware.rw_handles [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1400.597250] env[61215]: DEBUG oslo_vmware.rw_handles [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ab629826-0647-4610-92d7-1c81a8181cdc/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1400.898846] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee66e3f-b026-4504-9b6c-4f42ffa8617b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.907628] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5870209a-0f90-4152-8861-929237124669 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.935804] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bfee452-82a7-4567-b05d-ae4c405721b2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.943012] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdcab8f-42d4-45dd-8826-cbe1848a192c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.956873] env[61215]: DEBUG nova.compute.provider_tree [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1400.964993] env[61215]: DEBUG nova.scheduler.client.report [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1400.978087] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.575s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.978600] env[61215]: ERROR nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1400.978600] env[61215]: Faults: ['InvalidArgument'] [ 1400.978600] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Traceback (most recent call last): [ 1400.978600] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 
1400.978600] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] self.driver.spawn(context, instance, image_meta, [ 1400.978600] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1400.978600] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1400.978600] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1400.978600] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] self._fetch_image_if_missing(context, vi) [ 1400.978600] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1400.978600] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] image_cache(vi, tmp_image_ds_loc) [ 1400.978600] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1400.978950] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] vm_util.copy_virtual_disk( [ 1400.978950] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1400.978950] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] session._wait_for_task(vmdk_copy_task) [ 1400.978950] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1400.978950] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] return self.wait_for_task(task_ref) [ 1400.978950] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1400.978950] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] return evt.wait() [ 1400.978950] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1400.978950] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] result = hub.switch() [ 1400.978950] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1400.978950] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] return self.greenlet.switch() [ 1400.978950] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1400.978950] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] self.f(*self.args, **self.kw) [ 1400.979394] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1400.979394] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] raise exceptions.translate_fault(task_info.error) [ 1400.979394] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1400.979394] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Faults: ['InvalidArgument'] [ 1400.979394] env[61215]: ERROR nova.compute.manager [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] [ 1400.979394] env[61215]: DEBUG nova.compute.utils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1400.980748] env[61215]: DEBUG nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Build of instance d2756d6f-d1f6-4408-83a6-3cbae8bf8b04 was re-scheduled: A specified parameter was not correct: fileType [ 1400.980748] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1400.981130] env[61215]: DEBUG nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1400.981311] env[61215]: DEBUG nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1400.981468] env[61215]: DEBUG nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1400.981641] env[61215]: DEBUG nova.network.neutron [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1401.316687] env[61215]: DEBUG nova.network.neutron [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.333241] env[61215]: INFO nova.compute.manager [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: d2756d6f-d1f6-4408-83a6-3cbae8bf8b04] Took 0.35 seconds to deallocate network for instance. [ 1401.436029] env[61215]: INFO nova.scheduler.client.report [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Deleted allocations for instance d2756d6f-d1f6-4408-83a6-3cbae8bf8b04 [ 1401.455480] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f5d8b339-5114-4066-8dc9-6c197233a527 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "d2756d6f-d1f6-4408-83a6-3cbae8bf8b04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.300s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.476640] env[61215]: DEBUG nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1401.538138] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.539074] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.540086] env[61215]: INFO nova.compute.claims [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1401.958919] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fe7205-5459-485a-92d9-0368d4b3313c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.966826] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1b4a6d-721d-4b74-9434-b8946e538a32 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.997319] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5869b8a7-fdd3-4dd7-af06-7d28e78c6734 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.004547] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012c8c0a-e7f7-4ba1-83fa-5101b1345541 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.018319] env[61215]: DEBUG nova.compute.provider_tree [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1402.026746] env[61215]: DEBUG nova.scheduler.client.report [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 
1402.041027] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.502s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.041473] env[61215]: DEBUG nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1402.081200] env[61215]: DEBUG nova.compute.utils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1402.082510] env[61215]: DEBUG nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1402.082684] env[61215]: DEBUG nova.network.neutron [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1402.094910] env[61215]: DEBUG nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1402.144389] env[61215]: DEBUG nova.policy [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f6558d5326e4281af588e2eff9f52e5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e535f6fd42b746668582de13b5b7f2d1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1402.162384] env[61215]: DEBUG nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1402.185492] env[61215]: DEBUG nova.virt.hardware [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1402.185747] env[61215]: DEBUG nova.virt.hardware [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1402.185909] env[61215]: DEBUG nova.virt.hardware [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1402.186104] env[61215]: DEBUG nova.virt.hardware [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1402.186290] env[61215]: DEBUG nova.virt.hardware [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1402.186447] env[61215]: DEBUG nova.virt.hardware [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1402.186656] env[61215]: DEBUG nova.virt.hardware [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1402.186820] env[61215]: DEBUG nova.virt.hardware [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1402.186989] env[61215]: DEBUG nova.virt.hardware [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1402.187171] env[61215]: DEBUG nova.virt.hardware [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1402.187350] env[61215]: DEBUG nova.virt.hardware [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1402.188212] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8c714b-93d7-4d78-98ff-d0bcb6639390 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.195988] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5a9525-e69e-40b2-9521-085a3d015251 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.549571] env[61215]: DEBUG nova.network.neutron [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Successfully created port: 54d62074-921a-4008-872f-53e193f0aaea {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1403.352993] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.352993] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.607718] env[61215]: DEBUG nova.network.neutron [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Successfully updated port: 54d62074-921a-4008-872f-53e193f0aaea {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1403.626157] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b 
tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Acquiring lock "refresh_cache-44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.626157] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Acquired lock "refresh_cache-44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.626157] env[61215]: DEBUG nova.network.neutron [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1403.629075] env[61215]: DEBUG nova.compute.manager [req-7b6de65c-4d2f-4e3f-b705-e866bff67b29 req-1756c601-123e-44ba-9373-717df39bc224 service nova] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Received event network-vif-plugged-54d62074-921a-4008-872f-53e193f0aaea {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1403.629277] env[61215]: DEBUG oslo_concurrency.lockutils [req-7b6de65c-4d2f-4e3f-b705-e866bff67b29 req-1756c601-123e-44ba-9373-717df39bc224 service nova] Acquiring lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.629472] env[61215]: DEBUG oslo_concurrency.lockutils [req-7b6de65c-4d2f-4e3f-b705-e866bff67b29 req-1756c601-123e-44ba-9373-717df39bc224 service nova] Lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.629635] env[61215]: DEBUG oslo_concurrency.lockutils [req-7b6de65c-4d2f-4e3f-b705-e866bff67b29 req-1756c601-123e-44ba-9373-717df39bc224 service nova] Lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.630112] env[61215]: DEBUG nova.compute.manager [req-7b6de65c-4d2f-4e3f-b705-e866bff67b29 req-1756c601-123e-44ba-9373-717df39bc224 service nova] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] No waiting events found dispatching network-vif-plugged-54d62074-921a-4008-872f-53e193f0aaea {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1403.630112] env[61215]: WARNING nova.compute.manager [req-7b6de65c-4d2f-4e3f-b705-e866bff67b29 req-1756c601-123e-44ba-9373-717df39bc224 service nova] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Received unexpected event network-vif-plugged-54d62074-921a-4008-872f-53e193f0aaea for instance with vm_state building and task_state spawning. 
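The event handling just logged — acquire the per-instance "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9-events" lock, try to pop a waiter for network-vif-plugged-54d62074-921a-4008-872f-53e193f0aaea, find none, and warn about an unexpected event — can be pictured with a small sketch. This is an assumption-level model, not Nova's InstanceEvents implementation; waiters are keyed by (instance_uuid, event_name):

import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()      # cf. the "...-events" lock above
        self._waiters = {}                 # (instance_uuid, event) -> Event

    def prepare(self, instance_uuid, event_name):
        """Called by code that intends to wait for an external event."""
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_and_dispatch(self, instance_uuid, event_name):
        """Called when Neutron reports the event via the external event API."""
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # No waiting events found -> "Received unexpected event ..."
            return False
        ev.set()
        return True

Because the port was plugged while the instance was still in vm_state building / task_state spawning and nothing had registered a waiter yet, pop_and_dispatch() finds no entry and the manager only emits the WARNING seen above.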
[ 1403.686564] env[61215]: DEBUG nova.network.neutron [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1403.930347] env[61215]: DEBUG nova.network.neutron [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Updating instance_info_cache with network_info: [{"id": "54d62074-921a-4008-872f-53e193f0aaea", "address": "fa:16:3e:72:e6:56", "network": {"id": "9da197b7-1882-45ff-86ad-3f088f594f03", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2026639489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e535f6fd42b746668582de13b5b7f2d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54d62074-92", "ovs_interfaceid": "54d62074-921a-4008-872f-53e193f0aaea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.944747] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Releasing lock "refresh_cache-44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1403.945059] env[61215]: DEBUG nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Instance network_info: |[{"id": "54d62074-921a-4008-872f-53e193f0aaea", "address": "fa:16:3e:72:e6:56", "network": {"id": "9da197b7-1882-45ff-86ad-3f088f594f03", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2026639489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e535f6fd42b746668582de13b5b7f2d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", 
"segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54d62074-92", "ovs_interfaceid": "54d62074-921a-4008-872f-53e193f0aaea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1403.945675] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:72:e6:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7cd4cea-788c-4e6d-9df8-5d83838e2e2a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54d62074-921a-4008-872f-53e193f0aaea', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1403.953775] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Creating folder: Project (e535f6fd42b746668582de13b5b7f2d1). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1403.954454] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-89b15260-a19b-4a9f-a04a-284b40c931a2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.964728] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Created folder: Project (e535f6fd42b746668582de13b5b7f2d1) in parent group-v352463. [ 1403.964916] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Creating folder: Instances. Parent ref: group-v352504. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1403.965158] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88b4ea11-d543-48ae-b46f-eef9d6c3c779 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.973901] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Created folder: Instances in parent group-v352504. [ 1403.974144] env[61215]: DEBUG oslo.service.loopingcall [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1403.974339] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1403.974531] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b12f38f9-2183-422f-9418-3e21d5e49db5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.993055] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1403.993055] env[61215]: value = "task-1690313" [ 1403.993055] env[61215]: _type = "Task" [ 1403.993055] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.000563] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690313, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.504038] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690313, 'name': CreateVM_Task, 'duration_secs': 0.28953} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.504038] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1404.504207] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.504279] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.504588] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1404.504846] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae3f7241-38e2-476d-bbf3-b41203e71419 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.509197] env[61215]: DEBUG oslo_vmware.api [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Waiting for the task: (returnval){ [ 1404.509197] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5248def0-3809-cd6d-432c-878bc3d944dc" [ 1404.509197] env[61215]: _type = "Task" [ 1404.509197] 
env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.516370] env[61215]: DEBUG oslo_vmware.api [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5248def0-3809-cd6d-432c-878bc3d944dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.019965] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.020325] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1405.020453] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.654681] env[61215]: DEBUG nova.compute.manager [req-d805d98e-2a52-46e6-9c86-ce21f5d87f25 req-dd6f2b15-b2ea-4c3d-80a2-4c4bb16a72d6 service nova] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Received event network-changed-54d62074-921a-4008-872f-53e193f0aaea {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1405.654901] env[61215]: DEBUG nova.compute.manager [req-d805d98e-2a52-46e6-9c86-ce21f5d87f25 req-dd6f2b15-b2ea-4c3d-80a2-4c4bb16a72d6 service nova] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Refreshing instance network info cache due to event network-changed-54d62074-921a-4008-872f-53e193f0aaea. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1405.655112] env[61215]: DEBUG oslo_concurrency.lockutils [req-d805d98e-2a52-46e6-9c86-ce21f5d87f25 req-dd6f2b15-b2ea-4c3d-80a2-4c4bb16a72d6 service nova] Acquiring lock "refresh_cache-44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1405.655261] env[61215]: DEBUG oslo_concurrency.lockutils [req-d805d98e-2a52-46e6-9c86-ce21f5d87f25 req-dd6f2b15-b2ea-4c3d-80a2-4c4bb16a72d6 service nova] Acquired lock "refresh_cache-44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1405.655425] env[61215]: DEBUG nova.network.neutron [req-d805d98e-2a52-46e6-9c86-ce21f5d87f25 req-dd6f2b15-b2ea-4c3d-80a2-4c4bb16a72d6 service nova] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Refreshing network info cache for port 54d62074-921a-4008-872f-53e193f0aaea {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1406.039559] env[61215]: DEBUG nova.network.neutron [req-d805d98e-2a52-46e6-9c86-ce21f5d87f25 req-dd6f2b15-b2ea-4c3d-80a2-4c4bb16a72d6 service nova] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Updated VIF entry in instance network info cache for port 54d62074-921a-4008-872f-53e193f0aaea. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1406.039926] env[61215]: DEBUG nova.network.neutron [req-d805d98e-2a52-46e6-9c86-ce21f5d87f25 req-dd6f2b15-b2ea-4c3d-80a2-4c4bb16a72d6 service nova] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Updating instance_info_cache with network_info: [{"id": "54d62074-921a-4008-872f-53e193f0aaea", "address": "fa:16:3e:72:e6:56", "network": {"id": "9da197b7-1882-45ff-86ad-3f088f594f03", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-2026639489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e535f6fd42b746668582de13b5b7f2d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7cd4cea-788c-4e6d-9df8-5d83838e2e2a", "external-id": "nsx-vlan-transportzone-361", "segmentation_id": 361, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54d62074-92", "ovs_interfaceid": "54d62074-921a-4008-872f-53e193f0aaea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1406.050291] env[61215]: DEBUG oslo_concurrency.lockutils [req-d805d98e-2a52-46e6-9c86-ce21f5d87f25 req-dd6f2b15-b2ea-4c3d-80a2-4c4bb16a72d6 service nova] Releasing lock "refresh_cache-44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.598019] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.654478] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.654565] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1429.654719] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1429.674301] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1429.674463] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1429.674735] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1429.674892] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1429.675030] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1429.675159] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1429.675285] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1429.675431] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1429.675585] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1429.675727] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1429.675846] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1429.676345] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.676549] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1430.654077] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.649599] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.671778] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.671778] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1431.672114] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.683908] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.684157] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.684343] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.684501] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1431.685622] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b0f9a4-e034-4704-a8e1-5a6c8c466d06 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.694519] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9fb9bc-b5f0-4333-9054-ed9cafcd8f98 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.710293] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a310714-84f0-4997-ae81-01305e6216c8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.718211] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a036fd-7a66-438e-be6d-0ae78d8aec48 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.748385] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181310MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1431.748553] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1431.748736] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.824018] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 351c2ada-945a-4f0b-8fa9-47e3412c5e05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.824018] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 805748d7-e459-4608-a02d-05ac56c48290 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.824018] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ef0f6995-b272-4a45-a09d-5d8d38ffe23c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.824018] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 67068a42-eba7-4529-9ebf-43d6865362b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.824433] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ad40882f-de01-4bee-81dd-e91d07248d22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.824433] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 97dae204-f706-41b5-bf9f-b320d022b2f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.824433] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 82698789-4c08-453b-a973-1916d1f94af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.824433] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0223d7b6-12e1-4418-97f2-012ed41daa7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.824654] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.824654] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.835106] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d49f702b-cd29-4491-938c-0291b351ef20 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.847658] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 44f29d5d-46d6-433a-972b-f971a04200e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.858374] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8d807f25-01ea-42b2-b5ed-b8ff2b6c39ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.869212] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0ad2b135-a40e-4353-a524-1d66435197bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.879228] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 7d9fbfe1-a62e-41e2-8736-61b2f895598d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.889567] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 389bf40e-6d3f-4b37-a6a7-6b18a9281da7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.900309] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 9f4a9d38-7536-4804-9fde-0b14a18999b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.910729] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d99f8ee6-768f-4775-b07e-c84536e7f659 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.924706] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 1d14c483-5775-4eda-9173-67b02bd97889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.937901] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 666eca1f-edf0-445d-99f3-428547f01746 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.950023] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f30dda39-422f-433d-9684-f2c7486271fa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.980431] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49d99eb4-905c-409c-97e2-001801f61b38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1431.992766] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 1e53c769-1b6e-4e9b-805d-9ef8d8db4813 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.004546] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 846e6d6a-dc09-4b7e-81d2-3d2023d945c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.016943] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 1678b6d3-d4ef-4497-a240-b43a4837d9d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.029030] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02249c8d-c2b8-4e58-87eb-aecab70177bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.056715] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.068410] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 6bf85a18-78f3-4471-bdc6-b600f90e1700 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.079880] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 11f8cfbd-7fdd-4a5f-9fde-477caa043b0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.092156] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 01421743-cbfc-40d9-95aa-6b26422581e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.108752] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c2d2f172-9d88-4b88-8cf7-bbce01619c73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.119917] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 7f0ff2e7-30e3-425b-beff-061ba242981a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.132328] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1432.132589] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1432.132740] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1432.570718] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f99c76bf-6c55-4744-b1a0-88e13a7d9e2f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.579732] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4d62f3-424b-444d-b586-2e81ea3c92a1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.611805] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd6b80a-1ae5-4666-998b-33a91cdf8a95 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.620033] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9618869f-df8c-4bfa-955d-31fae5f0c4fd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.633433] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1432.643228] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1432.662087] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1432.662087] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.913s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.644771] env[61215]: DEBUG oslo_service.periodic_task [None 
req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1433.654451] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.402665] env[61215]: DEBUG oslo_concurrency.lockutils [None req-137995a1-3b0c-4493-a609-f1c3bd63e53c tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Acquiring lock "351c2ada-945a-4f0b-8fa9-47e3412c5e05" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.843202] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4f6a192d-32d6-467f-96a6-eaad6e73247b tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Acquiring lock "805748d7-e459-4608-a02d-05ac56c48290" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.039781] env[61215]: WARNING oslo_vmware.rw_handles [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1446.039781] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1446.039781] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1446.039781] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1446.039781] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1446.039781] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 1446.039781] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1446.039781] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1446.039781] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1446.039781] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1446.039781] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1446.039781] env[61215]: ERROR oslo_vmware.rw_handles [ 1446.040252] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/ab629826-0647-4610-92d7-1c81a8181cdc/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1446.042467] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None 
req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1446.042595] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Copying Virtual Disk [datastore1] vmware_temp/ab629826-0647-4610-92d7-1c81a8181cdc/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/ab629826-0647-4610-92d7-1c81a8181cdc/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1446.042932] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eab12ab2-ca34-482a-911c-5a7b619c261f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.050797] env[61215]: DEBUG oslo_vmware.api [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Waiting for the task: (returnval){ [ 1446.050797] env[61215]: value = "task-1690314" [ 1446.050797] env[61215]: _type = "Task" [ 1446.050797] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.059656] env[61215]: DEBUG oslo_vmware.api [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Task: {'id': task-1690314, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.561035] env[61215]: DEBUG oslo_vmware.exceptions [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Fault InvalidArgument not matched. 
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1446.561328] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.561929] env[61215]: ERROR nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1446.561929] env[61215]: Faults: ['InvalidArgument'] [ 1446.561929] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Traceback (most recent call last): [ 1446.561929] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1446.561929] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] yield resources [ 1446.561929] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1446.561929] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] self.driver.spawn(context, instance, image_meta, [ 1446.561929] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1446.561929] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1446.561929] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1446.561929] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] self._fetch_image_if_missing(context, vi) [ 1446.561929] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1446.562315] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] image_cache(vi, tmp_image_ds_loc) [ 1446.562315] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1446.562315] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] vm_util.copy_virtual_disk( [ 1446.562315] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1446.562315] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] session._wait_for_task(vmdk_copy_task) [ 1446.562315] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1446.562315] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] return self.wait_for_task(task_ref) [ 1446.562315] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1446.562315] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] return evt.wait() [ 1446.562315] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1446.562315] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] result = hub.switch() [ 1446.562315] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1446.562315] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] return self.greenlet.switch() [ 1446.562702] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1446.562702] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] self.f(*self.args, **self.kw) [ 1446.562702] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1446.562702] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] raise exceptions.translate_fault(task_info.error) [ 1446.562702] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1446.562702] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Faults: ['InvalidArgument'] [ 1446.562702] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] [ 1446.562702] env[61215]: INFO nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Terminating instance [ 1446.564056] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.564056] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1446.564281] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b5df646-f312-4c61-a882-f24ff221144e {{(pid=61215) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.566478] env[61215]: DEBUG nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1446.566693] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1446.567416] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57edd0d2-fee7-4c99-8904-6f666706adf9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.574711] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1446.575581] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c54ff6ab-a648-4ef4-ad65-3ac23c6637e8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.576965] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1446.577152] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1446.577816] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-029f5f88-374f-4735-a582-64e6d5121cfb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.582454] env[61215]: DEBUG oslo_vmware.api [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Waiting for the task: (returnval){ [ 1446.582454] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52a77d84-30b2-b6b4-ac45-147f48ae0986" [ 1446.582454] env[61215]: _type = "Task" [ 1446.582454] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.589206] env[61215]: DEBUG oslo_vmware.api [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52a77d84-30b2-b6b4-ac45-147f48ae0986, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.660521] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1446.660694] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1446.660915] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Deleting the datastore file [datastore1] 351c2ada-945a-4f0b-8fa9-47e3412c5e05 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1446.661196] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9efb13c-2fbd-4b8b-8c85-798a45c74103 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.666975] env[61215]: DEBUG oslo_vmware.api [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Waiting for the task: (returnval){ [ 1446.666975] env[61215]: value = "task-1690316" [ 1446.666975] env[61215]: _type = "Task" [ 1446.666975] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.675761] env[61215]: DEBUG oslo_vmware.api [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Task: {'id': task-1690316, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.864983] env[61215]: DEBUG oslo_concurrency.lockutils [None req-dba10e73-4705-48ac-9212-154e5afc7f6c tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Acquiring lock "ef0f6995-b272-4a45-a09d-5d8d38ffe23c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.092421] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1447.092680] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Creating directory with path [datastore1] vmware_temp/13405e88-2d66-4f0b-88ab-47d17d144095/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1447.092917] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e96ea8aa-1a77-43c7-900e-cf0a094c548d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.104806] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Created directory with path [datastore1] vmware_temp/13405e88-2d66-4f0b-88ab-47d17d144095/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1447.105020] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Fetch image to [datastore1] vmware_temp/13405e88-2d66-4f0b-88ab-47d17d144095/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1447.105204] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/13405e88-2d66-4f0b-88ab-47d17d144095/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1447.105930] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6de0efa-c095-4c98-b648-3e3f1aacf59a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.113463] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9308b10-8d3f-4199-a5e1-0c087ccec187 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.122905] env[61215]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160d04c0-652b-41fd-9dd0-52932fddef35 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.154753] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041e1b5f-c01b-4c4d-abec-b9c76ad18c37 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.161195] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-61797be8-7307-4e17-911f-19340f971591 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.175569] env[61215]: DEBUG oslo_vmware.api [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Task: {'id': task-1690316, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071925} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.175769] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1447.176012] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1447.176144] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1447.176321] env[61215]: INFO nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Took 0.61 seconds to destroy the instance on the hypervisor. 
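
The DeleteDatastoreFile_Task sequence above — invoke, then repeated "progress is 0%" polls until "completed successfully" — is oslo.vmware's standard task-wait pattern. A minimal sketch of the same flow against the public oslo.vmware API (placeholder vCenter endpoint and credentials; a real Nova node builds the session from its [vmware] config and passes a Datacenter moref rather than None):

    from oslo_vmware import api as vmware_api
    from oslo_vmware import exceptions as vexc

    # Placeholder endpoint/credentials.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10,
        task_poll_interval=0.5)  # cadence of the _poll_task DEBUG lines

    # FileManager.DeleteDatastoreFile_Task returns a Task managed object;
    # wait_for_task() polls it until success or raises a translated fault.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] 351c2ada-945a-4f0b-8fa9-47e3412c5e05',
        datacenter=None)  # placeholder; real callers pass a Datacenter ref
    try:
        session.wait_for_task(task)
    except vexc.VimFaultException as e:
        # A failed task surfaces as VimFaultException carrying the raw
        # fault names (e.g. ['InvalidArgument'], as in the traceback below).
        print(e.fault_list, str(e))
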
[ 1447.178510] env[61215]: DEBUG nova.compute.claims [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1447.178647] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1447.178862] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.186048] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1447.239289] env[61215]: DEBUG oslo_vmware.rw_handles [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/13405e88-2d66-4f0b-88ab-47d17d144095/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1447.304358] env[61215]: DEBUG oslo_vmware.rw_handles [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1447.304465] env[61215]: DEBUG oslo_vmware.rw_handles [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/13405e88-2d66-4f0b-88ab-47d17d144095/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1447.671810] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4d9a47-ab9b-4960-9429-704756c0fd7c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.679809] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9116257-df73-470a-8d12-da0a4ac10efd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.709819] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56dacf4-4f8f-4432-8955-72aa7b525369 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.717082] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c233b47-a1fb-4b0d-bda4-92042bb9d493 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.732059] env[61215]: DEBUG nova.compute.provider_tree [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1447.740850] env[61215]: DEBUG nova.scheduler.client.report [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1447.755821] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.577s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1447.756393] env[61215]: ERROR nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1447.756393] env[61215]: Faults: ['InvalidArgument'] [ 1447.756393] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Traceback (most recent call last): [ 1447.756393] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1447.756393] env[61215]: ERROR 
nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] self.driver.spawn(context, instance, image_meta, [ 1447.756393] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1447.756393] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1447.756393] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1447.756393] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] self._fetch_image_if_missing(context, vi) [ 1447.756393] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1447.756393] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] image_cache(vi, tmp_image_ds_loc) [ 1447.756393] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1447.756786] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] vm_util.copy_virtual_disk( [ 1447.756786] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1447.756786] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] session._wait_for_task(vmdk_copy_task) [ 1447.756786] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1447.756786] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] return self.wait_for_task(task_ref) [ 1447.756786] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1447.756786] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] return evt.wait() [ 1447.756786] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1447.756786] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] result = hub.switch() [ 1447.756786] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1447.756786] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] return self.greenlet.switch() [ 1447.756786] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1447.756786] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] self.f(*self.args, **self.kw) [ 1447.757103] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1447.757103] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] raise exceptions.translate_fault(task_info.error) [ 1447.757103] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1447.757103] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Faults: ['InvalidArgument'] [ 1447.757103] env[61215]: ERROR nova.compute.manager [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] [ 1447.757224] env[61215]: DEBUG nova.compute.utils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1447.758815] env[61215]: DEBUG nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Build of instance 351c2ada-945a-4f0b-8fa9-47e3412c5e05 was re-scheduled: A specified parameter was not correct: fileType [ 1447.758815] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1447.759465] env[61215]: DEBUG nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1447.759657] env[61215]: DEBUG nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1447.759841] env[61215]: DEBUG nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1447.760035] env[61215]: DEBUG nova.network.neutron [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1448.129351] env[61215]: DEBUG nova.network.neutron [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.143351] env[61215]: INFO nova.compute.manager [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Took 0.38 seconds to deallocate network for instance. [ 1448.272734] env[61215]: INFO nova.scheduler.client.report [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Deleted allocations for instance 351c2ada-945a-4f0b-8fa9-47e3412c5e05 [ 1448.293501] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0bcbd8c4-54ed-450a-8682-2ef6627f7b7e tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Lock "351c2ada-945a-4f0b-8fa9-47e3412c5e05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.745s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.294685] env[61215]: DEBUG oslo_concurrency.lockutils [None req-137995a1-3b0c-4493-a609-f1c3bd63e53c tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Lock "351c2ada-945a-4f0b-8fa9-47e3412c5e05" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 4.892s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.294915] env[61215]: DEBUG oslo_concurrency.lockutils [None req-137995a1-3b0c-4493-a609-f1c3bd63e53c tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Acquiring lock "351c2ada-945a-4f0b-8fa9-47e3412c5e05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.295139] env[61215]: DEBUG oslo_concurrency.lockutils [None req-137995a1-3b0c-4493-a609-f1c3bd63e53c tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Lock "351c2ada-945a-4f0b-8fa9-47e3412c5e05-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.295359] env[61215]: DEBUG oslo_concurrency.lockutils [None req-137995a1-3b0c-4493-a609-f1c3bd63e53c tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Lock "351c2ada-945a-4f0b-8fa9-47e3412c5e05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.301298] env[61215]: INFO nova.compute.manager [None req-137995a1-3b0c-4493-a609-f1c3bd63e53c tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Terminating instance [ 1448.301298] env[61215]: DEBUG nova.compute.manager [None req-137995a1-3b0c-4493-a609-f1c3bd63e53c tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1448.301298] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-137995a1-3b0c-4493-a609-f1c3bd63e53c tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1448.301298] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4962c68-39bf-489a-9c4d-192a037271b2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.308893] env[61215]: DEBUG nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1448.315407] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16f68f8-1439-4219-88b0-7219fd72cbb5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.346219] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-137995a1-3b0c-4493-a609-f1c3bd63e53c tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 351c2ada-945a-4f0b-8fa9-47e3412c5e05 could not be found. [ 1448.346320] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-137995a1-3b0c-4493-a609-f1c3bd63e53c tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1448.346489] env[61215]: INFO nova.compute.manager [None req-137995a1-3b0c-4493-a609-f1c3bd63e53c tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Took 0.05 seconds to destroy the instance on the hypervisor. 
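
The paired "Acquiring lock … / acquired … waited Ns / … released … held Ns" lines throughout this run are emitted by oslo_concurrency.lockutils; the "inner" frames at lockutils.py:402/407/421 belong to its wrapper. A minimal sketch of the decorator that produces them (the function body is illustrative only):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim():
        # Runs with the named lock held; lockutils logs how long the
        # caller waited for the lock and how long it was held.
        pass

    abort_instance_claim()
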
[ 1448.346782] env[61215]: DEBUG oslo.service.loopingcall [None req-137995a1-3b0c-4493-a609-f1c3bd63e53c tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1448.347026] env[61215]: DEBUG nova.compute.manager [-] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1448.347134] env[61215]: DEBUG nova.network.neutron [-] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1448.363888] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.364139] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.365666] env[61215]: INFO nova.compute.claims [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1448.386984] env[61215]: DEBUG nova.network.neutron [-] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.401824] env[61215]: INFO nova.compute.manager [-] [instance: 351c2ada-945a-4f0b-8fa9-47e3412c5e05] Took 0.05 seconds to deallocate network for instance. 
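
For the inventory payload logged around these claims (VCPU total 48 at allocation_ratio 4.0; MEMORY_MB total 196590 with 512 reserved; DISK_GB total 400), Placement derives usable capacity per resource class as (total - reserved) * allocation_ratio. A quick check with the same numbers:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
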
[ 1448.523479] env[61215]: DEBUG oslo_concurrency.lockutils [None req-137995a1-3b0c-4493-a609-f1c3bd63e53c tempest-ServerDiagnosticsTest-1752338977 tempest-ServerDiagnosticsTest-1752338977-project-member] Lock "351c2ada-945a-4f0b-8fa9-47e3412c5e05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.228s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.798107] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00973863-a82d-4529-8124-ed6d2add269d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.806386] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1336c385-04e2-4159-b8e6-8054fced20b4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.837673] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35054b3e-bb54-4486-b02a-5ddc900ca491 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.845464] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20907b5d-01a9-4289-bd80-8e55adf5a7f4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.858896] env[61215]: DEBUG nova.compute.provider_tree [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1448.867874] env[61215]: DEBUG nova.scheduler.client.report [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1448.880891] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.517s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.881405] env[61215]: DEBUG nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1448.914347] env[61215]: DEBUG nova.compute.utils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1448.915615] env[61215]: DEBUG nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1448.915785] env[61215]: DEBUG nova.network.neutron [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1448.928373] env[61215]: DEBUG nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1448.991462] env[61215]: DEBUG nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1449.007360] env[61215]: DEBUG nova.policy [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c19016beabda4063a3e381938580557e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17ae2382dbda441bb0bc4a90bd2a7baf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1449.017147] env[61215]: DEBUG nova.virt.hardware [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:17:25Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1353182181',id=27,is_public=True,memory_mb=128,name='tempest-flavor_with_ephemeral_0-1020479057',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1449.017418] env[61215]: DEBUG nova.virt.hardware [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1449.017582] env[61215]: DEBUG nova.virt.hardware [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1449.017772] env[61215]: DEBUG nova.virt.hardware [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1449.017923] env[61215]: DEBUG nova.virt.hardware [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1449.018093] env[61215]: DEBUG nova.virt.hardware [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1449.018313] env[61215]: DEBUG nova.virt.hardware [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1449.018477] env[61215]: DEBUG nova.virt.hardware [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1449.018645] env[61215]: DEBUG nova.virt.hardware [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1449.018812] env[61215]: DEBUG nova.virt.hardware [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1449.018992] env[61215]: DEBUG nova.virt.hardware [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1449.019908] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381b20e2-5a96-49d1-8a98-126dd4dcb5fe {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.028158] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4528f9-0980-42b6-bfc0-0a3496fe60d0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.500687] env[61215]: DEBUG nova.network.neutron [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Successfully created port: 122ee9cb-8f07-4220-89c8-b04e9146c9c6 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1450.479486] env[61215]: DEBUG nova.compute.manager [req-b9c81bf6-ca52-4002-8ab3-25907c448f87 req-cf637a91-cda5-4276-b540-1c33e407a99a service nova] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Received event network-vif-plugged-122ee9cb-8f07-4220-89c8-b04e9146c9c6 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1450.479818] env[61215]: DEBUG oslo_concurrency.lockutils [req-b9c81bf6-ca52-4002-8ab3-25907c448f87 req-cf637a91-cda5-4276-b540-1c33e407a99a service nova] Acquiring lock "d49f702b-cd29-4491-938c-0291b351ef20-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.480643] env[61215]: DEBUG oslo_concurrency.lockutils [req-b9c81bf6-ca52-4002-8ab3-25907c448f87 req-cf637a91-cda5-4276-b540-1c33e407a99a service nova] Lock "d49f702b-cd29-4491-938c-0291b351ef20-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.480899] env[61215]: DEBUG oslo_concurrency.lockutils [req-b9c81bf6-ca52-4002-8ab3-25907c448f87 req-cf637a91-cda5-4276-b540-1c33e407a99a service nova] Lock "d49f702b-cd29-4491-938c-0291b351ef20-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.481111] env[61215]: DEBUG nova.compute.manager [req-b9c81bf6-ca52-4002-8ab3-25907c448f87 req-cf637a91-cda5-4276-b540-1c33e407a99a service nova] [instance: d49f702b-cd29-4491-938c-0291b351ef20] No waiting events found dispatching network-vif-plugged-122ee9cb-8f07-4220-89c8-b04e9146c9c6 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1450.481338] env[61215]: WARNING nova.compute.manager [req-b9c81bf6-ca52-4002-8ab3-25907c448f87 req-cf637a91-cda5-4276-b540-1c33e407a99a service nova] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Received unexpected event network-vif-plugged-122ee9cb-8f07-4220-89c8-b04e9146c9c6 for instance with vm_state building and task_state spawning. [ 1450.508819] env[61215]: DEBUG nova.network.neutron [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Successfully updated port: 122ee9cb-8f07-4220-89c8-b04e9146c9c6 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1450.521087] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Acquiring lock "refresh_cache-d49f702b-cd29-4491-938c-0291b351ef20" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.521274] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Acquired lock "refresh_cache-d49f702b-cd29-4491-938c-0291b351ef20" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.523381] env[61215]: DEBUG nova.network.neutron [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1450.606559] env[61215]: DEBUG nova.network.neutron [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: 
d49f702b-cd29-4491-938c-0291b351ef20] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1451.144981] env[61215]: DEBUG nova.network.neutron [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Updating instance_info_cache with network_info: [{"id": "122ee9cb-8f07-4220-89c8-b04e9146c9c6", "address": "fa:16:3e:bb:f3:fa", "network": {"id": "6ce33a19-d062-4884-814d-ddd461771877", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-295433955-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ae2382dbda441bb0bc4a90bd2a7baf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap122ee9cb-8f", "ovs_interfaceid": "122ee9cb-8f07-4220-89c8-b04e9146c9c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.161420] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Releasing lock "refresh_cache-d49f702b-cd29-4491-938c-0291b351ef20" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.162461] env[61215]: DEBUG nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Instance network_info: |[{"id": "122ee9cb-8f07-4220-89c8-b04e9146c9c6", "address": "fa:16:3e:bb:f3:fa", "network": {"id": "6ce33a19-d062-4884-814d-ddd461771877", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-295433955-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ae2382dbda441bb0bc4a90bd2a7baf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap122ee9cb-8f", "ovs_interfaceid": "122ee9cb-8f07-4220-89c8-b04e9146c9c6", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1451.163234] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:f3:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ac7039c0-3374-4c08-87fc-af2449b48b02', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '122ee9cb-8f07-4220-89c8-b04e9146c9c6', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1451.172710] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Creating folder: Project (17ae2382dbda441bb0bc4a90bd2a7baf). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1451.173200] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64e2e99c-1d2a-4648-9796-ebb65c8af3d3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.189030] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Created folder: Project (17ae2382dbda441bb0bc4a90bd2a7baf) in parent group-v352463. [ 1451.189030] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Creating folder: Instances. Parent ref: group-v352507. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1451.189030] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-904a8455-11c7-4496-ad63-02573e3bdf54 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.197964] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Created folder: Instances in parent group-v352507. [ 1451.198238] env[61215]: DEBUG oslo.service.loopingcall [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1451.198428] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1451.198626] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-993d1b1a-5dee-4fbc-b584-a1aab4fb4ea0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.220043] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1451.220043] env[61215]: value = "task-1690319" [ 1451.220043] env[61215]: _type = "Task" [ 1451.220043] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.227834] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690319, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.733984] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690319, 'name': CreateVM_Task, 'duration_secs': 0.319982} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.733984] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1451.733984] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.733984] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.733984] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1451.734585] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5118938-e7b1-412c-948b-4484b8b4e054 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.738204] env[61215]: DEBUG oslo_vmware.api [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Waiting for the task: (returnval){ [ 1451.738204] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5235b44b-545b-44d3-5009-1949d7ce4136" [ 1451.738204] env[61215]: 
_type = "Task" [ 1451.738204] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.745376] env[61215]: DEBUG oslo_vmware.api [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5235b44b-545b-44d3-5009-1949d7ce4136, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.249429] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.249695] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1452.249916] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.867971] env[61215]: DEBUG nova.compute.manager [req-8197a435-431e-4ab1-a5be-172fc6ca58d9 req-21f7e82e-58e3-4e47-9f75-29dede010074 service nova] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Received event network-changed-122ee9cb-8f07-4220-89c8-b04e9146c9c6 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1452.867971] env[61215]: DEBUG nova.compute.manager [req-8197a435-431e-4ab1-a5be-172fc6ca58d9 req-21f7e82e-58e3-4e47-9f75-29dede010074 service nova] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Refreshing instance network info cache due to event network-changed-122ee9cb-8f07-4220-89c8-b04e9146c9c6. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1452.868236] env[61215]: DEBUG oslo_concurrency.lockutils [req-8197a435-431e-4ab1-a5be-172fc6ca58d9 req-21f7e82e-58e3-4e47-9f75-29dede010074 service nova] Acquiring lock "refresh_cache-d49f702b-cd29-4491-938c-0291b351ef20" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.868340] env[61215]: DEBUG oslo_concurrency.lockutils [req-8197a435-431e-4ab1-a5be-172fc6ca58d9 req-21f7e82e-58e3-4e47-9f75-29dede010074 service nova] Acquired lock "refresh_cache-d49f702b-cd29-4491-938c-0291b351ef20" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.868497] env[61215]: DEBUG nova.network.neutron [req-8197a435-431e-4ab1-a5be-172fc6ca58d9 req-21f7e82e-58e3-4e47-9f75-29dede010074 service nova] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Refreshing network info cache for port 122ee9cb-8f07-4220-89c8-b04e9146c9c6 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1453.284149] env[61215]: DEBUG nova.network.neutron [req-8197a435-431e-4ab1-a5be-172fc6ca58d9 req-21f7e82e-58e3-4e47-9f75-29dede010074 service nova] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Updated VIF entry in instance network info cache for port 122ee9cb-8f07-4220-89c8-b04e9146c9c6. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1453.284525] env[61215]: DEBUG nova.network.neutron [req-8197a435-431e-4ab1-a5be-172fc6ca58d9 req-21f7e82e-58e3-4e47-9f75-29dede010074 service nova] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Updating instance_info_cache with network_info: [{"id": "122ee9cb-8f07-4220-89c8-b04e9146c9c6", "address": "fa:16:3e:bb:f3:fa", "network": {"id": "6ce33a19-d062-4884-814d-ddd461771877", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-295433955-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17ae2382dbda441bb0bc4a90bd2a7baf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ac7039c0-3374-4c08-87fc-af2449b48b02", "external-id": "nsx-vlan-transportzone-592", "segmentation_id": 592, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap122ee9cb-8f", "ovs_interfaceid": "122ee9cb-8f07-4220-89c8-b04e9146c9c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1453.297743] env[61215]: DEBUG oslo_concurrency.lockutils [req-8197a435-431e-4ab1-a5be-172fc6ca58d9 req-21f7e82e-58e3-4e47-9f75-29dede010074 service nova] Releasing lock "refresh_cache-d49f702b-cd29-4491-938c-0291b351ef20" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1455.108978] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 
tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Acquiring lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.113013] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1457.452813] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a82dff54-aca3-42a3-9ae1-818021b01ae2 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Acquiring lock "67068a42-eba7-4529-9ebf-43d6865362b1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.619070] env[61215]: DEBUG oslo_concurrency.lockutils [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquiring lock "ad40882f-de01-4bee-81dd-e91d07248d22" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.389127] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9453684d-6010-41d2-91ac-923c13fc9127 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Acquiring lock "97dae204-f706-41b5-bf9f-b320d022b2f3" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.526502] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ae2513eb-6a21-476a-b92f-072d8e2ed7c0 tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Acquiring lock "82698789-4c08-453b-a973-1916d1f94af6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.283672] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e3e5258b-7348-455a-a3a9-a84636ea497b tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Acquiring lock "0223d7b6-12e1-4418-97f2-012ed41daa7a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.914271] env[61215]: DEBUG oslo_concurrency.lockutils [None req-16dc8d6f-eb38-4ea3-855e-f0ad85dfaf08 tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Acquiring lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.167483] env[61215]: DEBUG oslo_concurrency.lockutils [None req-361d7434-e052-4d23-b682-f02d37c50b7c tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Acquiring lock "d49f702b-cd29-4491-938c-0291b351ef20" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.911620] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e2747d29-820f-4f72-8540-d12fdd6496d7 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquiring lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.650335] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1489.658468] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1489.658468] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61215) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11258}} [ 1490.671869] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1490.672223] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1490.672223] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1490.695715] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1490.695875] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1490.696016] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Skipping network cache update for instance because it is Building.
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1490.696164] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1490.696292] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1490.696421] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1490.696569] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1490.696707] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1490.696830] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1490.696950] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1490.697125] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1491.654149] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1491.654415] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1492.657023] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1492.657023] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1492.657023] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1492.657023] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1492.657023] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1493.668386] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1493.668690] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1493.685736] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.685736] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.685736] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.686831] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1493.687286] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0757a864-a90b-4a3e-9642-243fcd8d0250 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.699837] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21fc6039-8166-4b8c-a198-acc1df45874e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.714819] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5cdfc9a-0a06-48dc-be36-81316c252678 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.721807] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbf8384-3ce3-468b-8c63-ed63a46f3346 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.753926] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181332MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1493.754134] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.754305] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.929964] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 805748d7-e459-4608-a02d-05ac56c48290 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.930183] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ef0f6995-b272-4a45-a09d-5d8d38ffe23c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.930452] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 67068a42-eba7-4529-9ebf-43d6865362b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.930603] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ad40882f-de01-4bee-81dd-e91d07248d22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.930916] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 97dae204-f706-41b5-bf9f-b320d022b2f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.931128] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 82698789-4c08-453b-a973-1916d1f94af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.931464] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0223d7b6-12e1-4418-97f2-012ed41daa7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.931645] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.931784] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.931908] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d49f702b-cd29-4491-938c-0291b351ef20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1493.960356] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.960650] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.962009] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 1e53c769-1b6e-4e9b-805d-9ef8d8db4813 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1493.972615] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 846e6d6a-dc09-4b7e-81d2-3d2023d945c5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1493.986715] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 1678b6d3-d4ef-4497-a240-b43a4837d9d5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1493.998404] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02249c8d-c2b8-4e58-87eb-aecab70177bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1494.010199] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1494.020989] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 6bf85a18-78f3-4471-bdc6-b600f90e1700 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1494.031649] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 11f8cfbd-7fdd-4a5f-9fde-477caa043b0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1494.042743] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 01421743-cbfc-40d9-95aa-6b26422581e4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1494.053943] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c2d2f172-9d88-4b88-8cf7-bbce01619c73 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1494.067388] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 7f0ff2e7-30e3-425b-beff-061ba242981a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1494.086463] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1494.098254] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8d4665c7-67de-4ab3-a8b7-596a5e1152ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1494.113316] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1494.113580] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1494.113738] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1494.130500] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing inventories for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1494.151400] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating ProviderTree inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1494.151602] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1494.166964] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing aggregate associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, aggregates: None {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1494.199231] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing trait associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, traits: 
COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1494.592134] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071b4006-dea5-4258-b4e6-eeef14a3fffe {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.602616] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a907ab61-37e4-411c-8402-99aeca4b0874 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.634497] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7693eafa-28d4-4370-aeb4-7cc2a41b6673 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.646348] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93dd770b-ce1f-4a6d-84c7-a71961df9cd7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.663147] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1494.679663] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1494.710724] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1494.710724] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.956s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.767096] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f9431c3a-9db2-459b-94fe-c7872ab25c05 tempest-SecurityGroupsTestJSON-1182727218 tempest-SecurityGroupsTestJSON-1182727218-project-member] Acquiring lock "486e9745-b512-48ad-852a-166a7d63cf5e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.769014] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f9431c3a-9db2-459b-94fe-c7872ab25c05 tempest-SecurityGroupsTestJSON-1182727218
tempest-SecurityGroupsTestJSON-1182727218-project-member] Lock "486e9745-b512-48ad-852a-166a7d63cf5e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.742168] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d752b425-c63c-4530-b33e-5d201854e03b tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Acquiring lock "5791e2e4-8ec9-4a4d-9ed9-afa829cdd6da" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.742168] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d752b425-c63c-4530-b33e-5d201854e03b tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "5791e2e4-8ec9-4a4d-9ed9-afa829cdd6da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.062022] env[61215]: WARNING oslo_vmware.rw_handles [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1497.062022] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1497.062022] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1497.062022] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1497.062022] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1497.062022] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 1497.062022] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1497.062022] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1497.062022] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1497.062022] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1497.062022] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1497.062022] env[61215]: ERROR oslo_vmware.rw_handles [ 1497.062022] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/13405e88-2d66-4f0b-88ab-47d17d144095/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1497.062529] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance:
805748d7-e459-4608-a02d-05ac56c48290] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1497.063167] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Copying Virtual Disk [datastore1] vmware_temp/13405e88-2d66-4f0b-88ab-47d17d144095/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/13405e88-2d66-4f0b-88ab-47d17d144095/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1497.063638] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-009687bf-bd7a-420b-ada6-2f4ab060dd6d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.072797] env[61215]: DEBUG oslo_vmware.api [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Waiting for the task: (returnval){ [ 1497.072797] env[61215]: value = "task-1690320" [ 1497.072797] env[61215]: _type = "Task" [ 1497.072797] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.081595] env[61215]: DEBUG oslo_vmware.api [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Task: {'id': task-1690320, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.590039] env[61215]: DEBUG oslo_vmware.exceptions [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Fault InvalidArgument not matched. 
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1497.590039] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.590039] env[61215]: ERROR nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1497.590039] env[61215]: Faults: ['InvalidArgument'] [ 1497.590039] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] Traceback (most recent call last): [ 1497.590039] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1497.590039] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] yield resources [ 1497.590039] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1497.590039] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] self.driver.spawn(context, instance, image_meta, [ 1497.590453] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1497.590453] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1497.590453] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1497.590453] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] self._fetch_image_if_missing(context, vi) [ 1497.590453] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1497.590453] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] image_cache(vi, tmp_image_ds_loc) [ 1497.590453] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1497.590453] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] vm_util.copy_virtual_disk( [ 1497.590453] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1497.590453] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] session._wait_for_task(vmdk_copy_task) [ 1497.590453] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1497.590453] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] return self.wait_for_task(task_ref) [ 1497.590453] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1497.590824] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] return evt.wait() [ 1497.590824] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1497.590824] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] result = hub.switch() [ 1497.590824] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1497.590824] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] return self.greenlet.switch() [ 1497.590824] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1497.590824] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] self.f(*self.args, **self.kw) [ 1497.590824] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1497.590824] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] raise exceptions.translate_fault(task_info.error) [ 1497.590824] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1497.590824] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] Faults: ['InvalidArgument'] [ 1497.590824] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] [ 1497.591225] env[61215]: INFO nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Terminating instance [ 1497.592116] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.592243] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1497.592831] env[61215]: DEBUG nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 
805748d7-e459-4608-a02d-05ac56c48290] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1497.593031] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1497.593259] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3bed3409-6402-4a3c-ad30-320442db5636 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.595692] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8cd8b9-5d0d-4a7a-817f-ffcb8c5d6a41 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.604461] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1497.605520] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f0059b5-3a01-4c92-a45c-be2f9f54f6ac {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.607104] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1497.607318] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1497.608046] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29158a57-0af6-4750-bc9f-aff4599d11c3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.613456] env[61215]: DEBUG oslo_vmware.api [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Waiting for the task: (returnval){ [ 1497.613456] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]521a766c-90d2-83ea-8381-5e661489b5f5" [ 1497.613456] env[61215]: _type = "Task" [ 1497.613456] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.623236] env[61215]: DEBUG oslo_vmware.api [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]521a766c-90d2-83ea-8381-5e661489b5f5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.655546] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1497.655737] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11220}} [ 1497.669459] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] There are 0 instances to clean {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1497.685603] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1497.685863] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1497.686073] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Deleting the datastore file [datastore1] 805748d7-e459-4608-a02d-05ac56c48290 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1497.686705] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d7bc2f7-db7d-4a10-8711-0765df7580d7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.693481] env[61215]: DEBUG oslo_vmware.api [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Waiting for the task: (returnval){ [ 1497.693481] env[61215]: value = "task-1690322" [ 1497.693481] env[61215]: _type = "Task" [ 1497.693481] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.706767] env[61215]: DEBUG oslo_vmware.api [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Task: {'id': task-1690322, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.126483] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1498.126749] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Creating directory with path [datastore1] vmware_temp/c17bb4cf-045e-488b-ab56-0646e6a3376a/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1498.126749] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20391d29-5f0c-474c-923f-fc16bc076475 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.141136] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Created directory with path [datastore1] vmware_temp/c17bb4cf-045e-488b-ab56-0646e6a3376a/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1498.141136] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Fetch image to [datastore1] vmware_temp/c17bb4cf-045e-488b-ab56-0646e6a3376a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1498.141136] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/c17bb4cf-045e-488b-ab56-0646e6a3376a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1498.141136] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1d0758-7c76-464c-ba0d-bfcb6f3bdd67 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.151230] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0d5cc6-198c-4286-8ce6-3c8278d1801d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.161828] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e574bb1-8101-4f71-a584-394643f6797b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.194899] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2728b13b-0bf1-42bd-b413-d49185d5b95c {{(pid=61215) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.206876] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fb7f67ee-b4ee-4683-bc78-7d6853f94199 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.208688] env[61215]: DEBUG oslo_vmware.api [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Task: {'id': task-1690322, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080707} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.208688] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1498.208688] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1498.209038] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1498.209038] env[61215]: INFO nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Took 0.62 seconds to destroy the instance on the hypervisor. 
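The entries above trace this log's recurring failure loop: the image is streamed over HTTP into the datastore (oslo_vmware.rw_handles), CopyVirtualDisk_Task is polled until vCenter reports the fault "A specified parameter was not correct: fileType" (InvalidArgument), and the compute manager then unregisters the VM, deletes its datastore files, and, as the next entries show, aborts the instance's resource claim. The repeated "progress is 0%" lines come from a fixed-interval poll of task state. A minimal sketch of that wait-until-done shape in plain Python, assuming a hypothetical poll_task_info callable; this is illustrative only, not the oslo.vmware API:

# Illustrative sketch only: the generic wait-until-done loop that produces
# log lines like "Task: {...} progress is 0%" above. Names are hypothetical;
# this is not the oslo.vmware implementation, just the same shape.
import time

class TaskFaultError(Exception):
    """Stand-in for a backend task fault (cf. VimFaultException above)."""

def wait_for_task(poll_task_info, interval=0.5, timeout=300.0):
    """Poll until the task succeeds; fail on a reported fault or on timeout.

    poll_task_info: callable returning a dict such as
    {'state': 'running' | 'success' | 'error', 'progress': int, 'error': str | None}
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_task_info()
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            # Mirrors the log: the queued fault surfaces to the caller,
            # which then tears down the half-built instance.
            raise TaskFaultError(info.get('error') or 'unknown fault')
        time.sleep(interval)  # fixed poll interval between "progress is N%" lines
    raise TimeoutError('task did not complete in time')

if __name__ == '__main__':
    # Two polls: first still running at 0%, then the fault seen in this log.
    states = iter([
        {'state': 'running', 'progress': 0, 'error': None},
        {'state': 'error', 'progress': 0,
         'error': 'A specified parameter was not correct: fileType'},
    ])
    try:
        wait_for_task(lambda: next(states), interval=0.01)
    except TaskFaultError as exc:
        print('task failed:', exc)

The traceback earlier in this section shows the real code taking exactly this path: _poll_task raises the translated fault, wait_for_task re-raises it through the eventlet hub into _build_and_run_instance, and the claim abort and VM teardown that follow are the cleanup for that failure.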
[ 1498.211066] env[61215]: DEBUG nova.compute.claims [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1498.211248] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.211467] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.229717] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1498.305220] env[61215]: DEBUG oslo_vmware.rw_handles [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c17bb4cf-045e-488b-ab56-0646e6a3376a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1498.373055] env[61215]: DEBUG oslo_vmware.rw_handles [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1498.373055] env[61215]: DEBUG oslo_vmware.rw_handles [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c17bb4cf-045e-488b-ab56-0646e6a3376a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1498.720184] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeee56cd-4cd2-4b76-845f-d78703570647 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.730203] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab23113-ffcd-4533-9cca-df57e58ace5a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.761808] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fab91b-7f2e-41f6-bec8-67f5c5740e01 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.769490] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8141e73-91f5-43e6-9ceb-603fe75b7c23 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.785819] env[61215]: DEBUG nova.compute.provider_tree [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.804031] env[61215]: DEBUG nova.scheduler.client.report [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1498.820045] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.608s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.820427] env[61215]: ERROR nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1498.820427] env[61215]: Faults: ['InvalidArgument'] [ 1498.820427] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] Traceback (most recent call last): [ 1498.820427] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1498.820427] env[61215]: ERROR nova.compute.manager 
[instance: 805748d7-e459-4608-a02d-05ac56c48290] self.driver.spawn(context, instance, image_meta, [ 1498.820427] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1498.820427] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1498.820427] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1498.820427] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] self._fetch_image_if_missing(context, vi) [ 1498.820427] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1498.820427] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] image_cache(vi, tmp_image_ds_loc) [ 1498.820427] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1498.820774] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] vm_util.copy_virtual_disk( [ 1498.820774] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1498.820774] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] session._wait_for_task(vmdk_copy_task) [ 1498.820774] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1498.820774] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] return self.wait_for_task(task_ref) [ 1498.820774] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1498.820774] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] return evt.wait() [ 1498.820774] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1498.820774] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] result = hub.switch() [ 1498.820774] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1498.820774] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] return self.greenlet.switch() [ 1498.820774] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1498.820774] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] self.f(*self.args, **self.kw) [ 1498.821117] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1498.821117] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] raise exceptions.translate_fault(task_info.error) [ 1498.821117] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1498.821117] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] Faults: ['InvalidArgument'] [ 1498.821117] env[61215]: ERROR nova.compute.manager [instance: 805748d7-e459-4608-a02d-05ac56c48290] [ 1498.821252] env[61215]: DEBUG nova.compute.utils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1498.822777] env[61215]: DEBUG nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Build of instance 805748d7-e459-4608-a02d-05ac56c48290 was re-scheduled: A specified parameter was not correct: fileType [ 1498.822777] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1498.823587] env[61215]: DEBUG nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1498.823587] env[61215]: DEBUG nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1498.823587] env[61215]: DEBUG nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1498.823770] env[61215]: DEBUG nova.network.neutron [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1499.757014] env[61215]: DEBUG nova.network.neutron [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.771069] env[61215]: INFO nova.compute.manager [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Took 0.95 seconds to deallocate network for instance. [ 1499.918094] env[61215]: INFO nova.scheduler.client.report [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Deleted allocations for instance 805748d7-e459-4608-a02d-05ac56c48290 [ 1499.947168] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a4586dde-b7b9-4b6a-b5af-66b2e81e06a6 tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Lock "805748d7-e459-4608-a02d-05ac56c48290" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 251.785s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.948627] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4f6a192d-32d6-467f-96a6-eaad6e73247b tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Lock "805748d7-e459-4608-a02d-05ac56c48290" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 54.106s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.949117] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4f6a192d-32d6-467f-96a6-eaad6e73247b tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Acquiring lock "805748d7-e459-4608-a02d-05ac56c48290-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.950293] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4f6a192d-32d6-467f-96a6-eaad6e73247b tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Lock "805748d7-e459-4608-a02d-05ac56c48290-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.950594] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4f6a192d-32d6-467f-96a6-eaad6e73247b tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Lock "805748d7-e459-4608-a02d-05ac56c48290-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.952688] env[61215]: INFO nova.compute.manager [None req-4f6a192d-32d6-467f-96a6-eaad6e73247b tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Terminating instance [ 1499.955737] env[61215]: DEBUG nova.compute.manager [None req-4f6a192d-32d6-467f-96a6-eaad6e73247b tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1499.955737] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6a192d-32d6-467f-96a6-eaad6e73247b tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1499.956582] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8ac98d6a-6fc9-4085-a98a-05556c5545c4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.966166] env[61215]: DEBUG nova.compute.manager [None req-f97f9e6a-c3ff-48ca-9eae-811e2bdd73d5 tempest-ServersAdminTestJSON-1520134115 tempest-ServersAdminTestJSON-1520134115-project-member] [instance: 44f29d5d-46d6-433a-972b-f971a04200e1] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1499.979943] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d1f816-c5a1-407e-a71a-aeb649c12774 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.017877] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-4f6a192d-32d6-467f-96a6-eaad6e73247b tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 805748d7-e459-4608-a02d-05ac56c48290 could not be found. [ 1500.018099] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6a192d-32d6-467f-96a6-eaad6e73247b tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1500.019148] env[61215]: INFO nova.compute.manager [None req-4f6a192d-32d6-467f-96a6-eaad6e73247b tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Took 0.06 seconds to destroy the instance on the hypervisor. 
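Between the abort of the failed claim (held 0.608s) and the later instance_claim for fb1f5eae, every resource mutation is serialized on the single "compute_resources" lock, as the lockutils acquire/release pairs show. Here is a minimal sketch of that lock-guarded claim/abort discipline, using simplified, hypothetical types rather than Nova's actual ResourceTracker.

import threading

class SimpleResourceTracker:
    """Illustrative only: serialize all claims on one lock."""

    def __init__(self, inventory):
        self._lock = threading.Lock()       # "compute_resources" in the log
        self.free = dict(inventory)         # e.g. {'VCPU': 48, 'MEMORY_MB': 196590}

    def instance_claim(self, request):
        with self._lock:                    # "... acquired by ... instance_claim"
            if any(self.free[r] < n for r, n in request.items()):
                raise RuntimeError('insufficient resources')
            for r, n in request.items():
                self.free[r] -= n
        return dict(request)                # claim handle, used to abort later

    def abort_instance_claim(self, claim):
        with self._lock:                    # "... acquired by ... abort_instance_claim"
            for r, n in claim.items():
                self.free[r] += n           # return resources on build failure

Aborting the claim is what lets the rescheduled build, and the parallel claims from the other tempest tests below, proceed against accurate inventory.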
[ 1500.019148] env[61215]: DEBUG oslo.service.loopingcall [None req-4f6a192d-32d6-467f-96a6-eaad6e73247b tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1500.019148] env[61215]: DEBUG nova.compute.manager [-] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1500.019148] env[61215]: DEBUG nova.network.neutron [-] [instance: 805748d7-e459-4608-a02d-05ac56c48290] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1500.024590] env[61215]: DEBUG nova.compute.manager [None req-f97f9e6a-c3ff-48ca-9eae-811e2bdd73d5 tempest-ServersAdminTestJSON-1520134115 tempest-ServersAdminTestJSON-1520134115-project-member] [instance: 44f29d5d-46d6-433a-972b-f971a04200e1] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1500.059855] env[61215]: DEBUG nova.network.neutron [-] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.068636] env[61215]: INFO nova.compute.manager [-] [instance: 805748d7-e459-4608-a02d-05ac56c48290] Took 0.05 seconds to deallocate network for instance. [ 1500.075187] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f97f9e6a-c3ff-48ca-9eae-811e2bdd73d5 tempest-ServersAdminTestJSON-1520134115 tempest-ServersAdminTestJSON-1520134115-project-member] Lock "44f29d5d-46d6-433a-972b-f971a04200e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.213s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.094387] env[61215]: DEBUG nova.compute.manager [None req-dc95e430-386a-4245-b0cb-b9ebca5ef646 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 8d807f25-01ea-42b2-b5ed-b8ff2b6c39ad] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1500.160384] env[61215]: DEBUG nova.compute.manager [None req-dc95e430-386a-4245-b0cb-b9ebca5ef646 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 8d807f25-01ea-42b2-b5ed-b8ff2b6c39ad] Instance disappeared before build. 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1500.197106] env[61215]: DEBUG oslo_concurrency.lockutils [None req-dc95e430-386a-4245-b0cb-b9ebca5ef646 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "8d807f25-01ea-42b2-b5ed-b8ff2b6c39ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.782s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.208809] env[61215]: DEBUG nova.compute.manager [None req-8c813b26-2396-4945-8d07-29e004608a22 tempest-ServersTestManualDisk-677728471 tempest-ServersTestManualDisk-677728471-project-member] [instance: 0ad2b135-a40e-4353-a524-1d66435197bf] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1500.247524] env[61215]: DEBUG nova.compute.manager [None req-8c813b26-2396-4945-8d07-29e004608a22 tempest-ServersTestManualDisk-677728471 tempest-ServersTestManualDisk-677728471-project-member] [instance: 0ad2b135-a40e-4353-a524-1d66435197bf] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1500.257853] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4f6a192d-32d6-467f-96a6-eaad6e73247b tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Lock "805748d7-e459-4608-a02d-05ac56c48290" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.309s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.301061] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8c813b26-2396-4945-8d07-29e004608a22 tempest-ServersTestManualDisk-677728471 tempest-ServersTestManualDisk-677728471-project-member] Lock "0ad2b135-a40e-4353-a524-1d66435197bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.496s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.315135] env[61215]: DEBUG nova.compute.manager [None req-388f4f53-e221-4c0f-b93f-56afe40b7fb2 tempest-SecurityGroupsTestJSON-1182727218 tempest-SecurityGroupsTestJSON-1182727218-project-member] [instance: 7d9fbfe1-a62e-41e2-8736-61b2f895598d] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1500.346154] env[61215]: DEBUG nova.compute.manager [None req-388f4f53-e221-4c0f-b93f-56afe40b7fb2 tempest-SecurityGroupsTestJSON-1182727218 tempest-SecurityGroupsTestJSON-1182727218-project-member] [instance: 7d9fbfe1-a62e-41e2-8736-61b2f895598d] Instance disappeared before build. 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1500.375381] env[61215]: DEBUG oslo_concurrency.lockutils [None req-388f4f53-e221-4c0f-b93f-56afe40b7fb2 tempest-SecurityGroupsTestJSON-1182727218 tempest-SecurityGroupsTestJSON-1182727218-project-member] Lock "7d9fbfe1-a62e-41e2-8736-61b2f895598d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.081s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.391436] env[61215]: DEBUG nova.compute.manager [None req-b4d758fa-00fe-4978-b44f-555600c0d47c tempest-ServersAdminTestJSON-1520134115 tempest-ServersAdminTestJSON-1520134115-project-member] [instance: 389bf40e-6d3f-4b37-a6a7-6b18a9281da7] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1500.425755] env[61215]: DEBUG nova.compute.manager [None req-b4d758fa-00fe-4978-b44f-555600c0d47c tempest-ServersAdminTestJSON-1520134115 tempest-ServersAdminTestJSON-1520134115-project-member] [instance: 389bf40e-6d3f-4b37-a6a7-6b18a9281da7] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1500.452373] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b4d758fa-00fe-4978-b44f-555600c0d47c tempest-ServersAdminTestJSON-1520134115 tempest-ServersAdminTestJSON-1520134115-project-member] Lock "389bf40e-6d3f-4b37-a6a7-6b18a9281da7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.527s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.468945] env[61215]: DEBUG nova.compute.manager [None req-3be27988-e200-47ae-a99e-eac1ae81b962 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 666eca1f-edf0-445d-99f3-428547f01746] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1500.496351] env[61215]: DEBUG nova.compute.manager [None req-3be27988-e200-47ae-a99e-eac1ae81b962 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 666eca1f-edf0-445d-99f3-428547f01746] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1500.534757] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3be27988-e200-47ae-a99e-eac1ae81b962 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "666eca1f-edf0-445d-99f3-428547f01746" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.624s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.544425] env[61215]: DEBUG nova.compute.manager [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] [instance: 9f4a9d38-7536-4804-9fde-0b14a18999b3] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1500.572167] env[61215]: DEBUG nova.compute.manager [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] [instance: 9f4a9d38-7536-4804-9fde-0b14a18999b3] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1500.603824] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] Lock "9f4a9d38-7536-4804-9fde-0b14a18999b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.634s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.617050] env[61215]: DEBUG nova.compute.manager [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] [instance: d99f8ee6-768f-4775-b07e-c84536e7f659] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1500.643589] env[61215]: DEBUG nova.compute.manager [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] [instance: d99f8ee6-768f-4775-b07e-c84536e7f659] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1500.682012] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] Lock "d99f8ee6-768f-4775-b07e-c84536e7f659" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.673s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.699157] env[61215]: DEBUG nova.compute.manager [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] [instance: 1d14c483-5775-4eda-9173-67b02bd97889] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1500.738172] env[61215]: DEBUG nova.compute.manager [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] [instance: 1d14c483-5775-4eda-9173-67b02bd97889] Instance disappeared before build. 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1500.773995] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2b4c2576-ba83-46eb-95bf-ce7cddf5de8d tempest-ListServersNegativeTestJSON-786174427 tempest-ListServersNegativeTestJSON-786174427-project-member] Lock "1d14c483-5775-4eda-9173-67b02bd97889" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.718s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.796817] env[61215]: DEBUG nova.compute.manager [None req-74e4cba9-3320-497e-8550-20a7981f58e3 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: f30dda39-422f-433d-9684-f2c7486271fa] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1500.828224] env[61215]: DEBUG nova.compute.manager [None req-74e4cba9-3320-497e-8550-20a7981f58e3 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: f30dda39-422f-433d-9684-f2c7486271fa] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1500.853987] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74e4cba9-3320-497e-8550-20a7981f58e3 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "f30dda39-422f-433d-9684-f2c7486271fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.539s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.871122] env[61215]: DEBUG nova.compute.manager [None req-c5a0e0e6-66f3-4502-9c80-273693e532e2 tempest-AttachInterfacesUnderV243Test-1052085376 tempest-AttachInterfacesUnderV243Test-1052085376-project-member] [instance: 49d99eb4-905c-409c-97e2-001801f61b38] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1500.918905] env[61215]: DEBUG nova.compute.manager [None req-c5a0e0e6-66f3-4502-9c80-273693e532e2 tempest-AttachInterfacesUnderV243Test-1052085376 tempest-AttachInterfacesUnderV243Test-1052085376-project-member] [instance: 49d99eb4-905c-409c-97e2-001801f61b38] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1500.946520] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c5a0e0e6-66f3-4502-9c80-273693e532e2 tempest-AttachInterfacesUnderV243Test-1052085376 tempest-AttachInterfacesUnderV243Test-1052085376-project-member] Lock "49d99eb4-905c-409c-97e2-001801f61b38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.762s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.960674] env[61215]: DEBUG nova.compute.manager [None req-b4dbf638-ec9f-4a92-aad5-bb49f7954b93 tempest-FloatingIPsAssociationTestJSON-841876999 tempest-FloatingIPsAssociationTestJSON-841876999-project-member] [instance: 1e53c769-1b6e-4e9b-805d-9ef8d8db4813] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1500.991743] env[61215]: DEBUG nova.compute.manager [None req-b4dbf638-ec9f-4a92-aad5-bb49f7954b93 tempest-FloatingIPsAssociationTestJSON-841876999 tempest-FloatingIPsAssociationTestJSON-841876999-project-member] [instance: 1e53c769-1b6e-4e9b-805d-9ef8d8db4813] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1501.021831] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b4dbf638-ec9f-4a92-aad5-bb49f7954b93 tempest-FloatingIPsAssociationTestJSON-841876999 tempest-FloatingIPsAssociationTestJSON-841876999-project-member] Lock "1e53c769-1b6e-4e9b-805d-9ef8d8db4813" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.316s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.036658] env[61215]: DEBUG nova.compute.manager [None req-93631d84-ddd4-4b4f-b8f3-7d004f6de9aa tempest-ServersAdmin275Test-68190844 tempest-ServersAdmin275Test-68190844-project-member] [instance: 846e6d6a-dc09-4b7e-81d2-3d2023d945c5] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1501.067905] env[61215]: DEBUG nova.compute.manager [None req-93631d84-ddd4-4b4f-b8f3-7d004f6de9aa tempest-ServersAdmin275Test-68190844 tempest-ServersAdmin275Test-68190844-project-member] [instance: 846e6d6a-dc09-4b7e-81d2-3d2023d945c5] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1501.104197] env[61215]: DEBUG oslo_concurrency.lockutils [None req-93631d84-ddd4-4b4f-b8f3-7d004f6de9aa tempest-ServersAdmin275Test-68190844 tempest-ServersAdmin275Test-68190844-project-member] Lock "846e6d6a-dc09-4b7e-81d2-3d2023d945c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.264s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.115776] env[61215]: DEBUG nova.compute.manager [None req-8b386171-4c7c-4fc6-80d4-6c4db91b7f6c tempest-InstanceActionsTestJSON-1893819667 tempest-InstanceActionsTestJSON-1893819667-project-member] [instance: 1678b6d3-d4ef-4497-a240-b43a4837d9d5] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1501.146827] env[61215]: DEBUG nova.compute.manager [None req-8b386171-4c7c-4fc6-80d4-6c4db91b7f6c tempest-InstanceActionsTestJSON-1893819667 tempest-InstanceActionsTestJSON-1893819667-project-member] [instance: 1678b6d3-d4ef-4497-a240-b43a4837d9d5] Instance disappeared before build. 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1501.177618] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8b386171-4c7c-4fc6-80d4-6c4db91b7f6c tempest-InstanceActionsTestJSON-1893819667 tempest-InstanceActionsTestJSON-1893819667-project-member] Lock "1678b6d3-d4ef-4497-a240-b43a4837d9d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.573s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.192895] env[61215]: DEBUG nova.compute.manager [None req-9911a0e8-8a3e-48e2-a548-b520b42ab8f3 tempest-ServerActionsTestOtherB-954961186 tempest-ServerActionsTestOtherB-954961186-project-member] [instance: 02249c8d-c2b8-4e58-87eb-aecab70177bf] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1501.230413] env[61215]: DEBUG nova.compute.manager [None req-9911a0e8-8a3e-48e2-a548-b520b42ab8f3 tempest-ServerActionsTestOtherB-954961186 tempest-ServerActionsTestOtherB-954961186-project-member] [instance: 02249c8d-c2b8-4e58-87eb-aecab70177bf] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1501.261036] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9911a0e8-8a3e-48e2-a548-b520b42ab8f3 tempest-ServerActionsTestOtherB-954961186 tempest-ServerActionsTestOtherB-954961186-project-member] Lock "02249c8d-c2b8-4e58-87eb-aecab70177bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.167s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.275742] env[61215]: DEBUG nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1501.345700] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.346163] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.002s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.348187] env[61215]: INFO nova.compute.claims [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1501.723549] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170a0a9b-4359-4cfe-9563-4787a2eae1da {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.734122] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bddfed5-9fc4-4dbc-a370-3f4335a519b2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.769415] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb50ca0-c99c-4b73-b0c6-5a0a47b66231 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.777104] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2171dee4-a0ef-4f41-9e7e-ea86abc68132 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.791419] env[61215]: DEBUG nova.compute.provider_tree [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1501.809248] env[61215]: DEBUG nova.scheduler.client.report [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1501.838883] env[61215]: DEBUG oslo_concurrency.lockutils [None 
req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.493s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.839458] env[61215]: DEBUG nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1501.890839] env[61215]: DEBUG nova.compute.utils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1501.892348] env[61215]: DEBUG nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1501.892453] env[61215]: DEBUG nova.network.neutron [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1501.904437] env[61215]: DEBUG nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1501.990106] env[61215]: DEBUG nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1502.025717] env[61215]: DEBUG nova.virt.hardware [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1502.025997] env[61215]: DEBUG nova.virt.hardware [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1502.026259] env[61215]: DEBUG nova.virt.hardware [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1502.026522] env[61215]: DEBUG nova.virt.hardware [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1502.027099] env[61215]: DEBUG nova.virt.hardware [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1502.027292] env[61215]: DEBUG nova.virt.hardware [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1502.027561] env[61215]: DEBUG nova.virt.hardware [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1502.027793] env[61215]: DEBUG nova.virt.hardware [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1502.027950] env[61215]: DEBUG 
nova.virt.hardware [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1502.028138] env[61215]: DEBUG nova.virt.hardware [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1502.028350] env[61215]: DEBUG nova.virt.hardware [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1502.029547] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f543cc-02ae-44a3-a154-3aeda714cee1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.039848] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd51cce-5bc0-4cd0-9888-0da3f38022cc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.073921] env[61215]: DEBUG nova.policy [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e7451abb75ba4168b864a5add19c7734', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '700177b2d3034f9e8d00afa198f3fa48', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1503.358115] env[61215]: DEBUG nova.network.neutron [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Successfully created port: 4d1d1f98-d8a0-49c8-932b-29460fc755b4 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1504.478249] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquiring lock "2e186217-c1e1-40c6-8d84-988f35f6b93d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.478685] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Lock "2e186217-c1e1-40c6-8d84-988f35f6b93d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.481883] env[61215]: DEBUG nova.network.neutron [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Successfully updated port: 4d1d1f98-d8a0-49c8-932b-29460fc755b4 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1504.510799] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Acquiring lock "refresh_cache-fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1504.510799] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Acquired lock "refresh_cache-fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.510799] env[61215]: DEBUG nova.network.neutron [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1504.570308] env[61215]: DEBUG nova.network.neutron [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1504.845830] env[61215]: DEBUG nova.network.neutron [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Updating instance_info_cache with network_info: [{"id": "4d1d1f98-d8a0-49c8-932b-29460fc755b4", "address": "fa:16:3e:df:ce:18", "network": {"id": "431850f7-328a-4000-844c-49284ae74123", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1058107245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "700177b2d3034f9e8d00afa198f3fa48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d1d1f98-d8", "ovs_interfaceid": "4d1d1f98-d8a0-49c8-932b-29460fc755b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.861966] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Releasing lock "refresh_cache-fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1504.862311] env[61215]: DEBUG nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Instance network_info: |[{"id": "4d1d1f98-d8a0-49c8-932b-29460fc755b4", "address": "fa:16:3e:df:ce:18", "network": {"id": "431850f7-328a-4000-844c-49284ae74123", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1058107245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "700177b2d3034f9e8d00afa198f3fa48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d1d1f98-d8", "ovs_interfaceid": "4d1d1f98-d8a0-49c8-932b-29460fc755b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1504.862732] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:ce:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '357d2811-e990-4985-9f9e-b158d10d3699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d1d1f98-d8a0-49c8-932b-29460fc755b4', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1504.872979] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Creating folder: Project (700177b2d3034f9e8d00afa198f3fa48). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1504.874158] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-597eb937-9170-474e-adb5-56634b6194f0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.886359] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Created folder: Project (700177b2d3034f9e8d00afa198f3fa48) in parent group-v352463. [ 1504.886606] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Creating folder: Instances. Parent ref: group-v352510. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1504.886912] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9e720fc-9c45-4a5f-95ef-bd73bb651531 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.897695] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Created folder: Instances in parent group-v352510. [ 1504.897948] env[61215]: DEBUG oslo.service.loopingcall [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1504.898158] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1504.898364] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e859277-313a-4953-8640-472124038c4e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.928750] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1504.928750] env[61215]: value = "task-1690325" [ 1504.928750] env[61215]: _type = "Task" [ 1504.928750] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.936557] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690325, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.428508] env[61215]: DEBUG nova.compute.manager [req-dc6b8b50-0b3f-480e-9d9e-51ff9eb95a7e req-3c184e24-7a7b-4668-84e7-eac618e49a60 service nova] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Received event network-vif-plugged-4d1d1f98-d8a0-49c8-932b-29460fc755b4 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1505.428508] env[61215]: DEBUG oslo_concurrency.lockutils [req-dc6b8b50-0b3f-480e-9d9e-51ff9eb95a7e req-3c184e24-7a7b-4668-84e7-eac618e49a60 service nova] Acquiring lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.428508] env[61215]: DEBUG oslo_concurrency.lockutils [req-dc6b8b50-0b3f-480e-9d9e-51ff9eb95a7e req-3c184e24-7a7b-4668-84e7-eac618e49a60 service nova] Lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.428508] env[61215]: DEBUG oslo_concurrency.lockutils [req-dc6b8b50-0b3f-480e-9d9e-51ff9eb95a7e req-3c184e24-7a7b-4668-84e7-eac618e49a60 service nova] Lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.428848] env[61215]: DEBUG nova.compute.manager [req-dc6b8b50-0b3f-480e-9d9e-51ff9eb95a7e req-3c184e24-7a7b-4668-84e7-eac618e49a60 service nova] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] No waiting events found dispatching network-vif-plugged-4d1d1f98-d8a0-49c8-932b-29460fc755b4 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1505.428848] env[61215]: WARNING nova.compute.manager [req-dc6b8b50-0b3f-480e-9d9e-51ff9eb95a7e req-3c184e24-7a7b-4668-84e7-eac618e49a60 service nova] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Received unexpected event network-vif-plugged-4d1d1f98-d8a0-49c8-932b-29460fc755b4 for instance with vm_state building and task_state spawning.
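The CreateVM_Task exchange above is oslo.vmware's standard invoke-then-poll pattern: the SOAP call returns a task moref (task-1690325 here) immediately, and wait_for_task() polls the task at a fixed interval, emitting the "progress is N%" lines, until it succeeds or its fault is translated and raised. A minimal sketch of that pattern, assuming a reachable vCenter and pre-built folder/config-spec/resource-pool morefs; the host name and credentials are illustrative placeholders, not values from this log:

    from oslo_vmware import api

    # Illustrative session; positional args are host, username, password.
    session = api.VMwareAPISession(
        'vc.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def create_vm(session, vm_folder, config_spec, res_pool):
        # Folder.CreateVM_Task returns a task moref right away...
        task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                                  config=config_spec, pool=res_pool)
        # ...and wait_for_task() blocks until the task completes, raising
        # the translated fault (e.g. VimFaultException) on error.
        return session.wait_for_task(task)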
[ 1505.443383] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690325, 'name': CreateVM_Task, 'duration_secs': 0.375522} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.445011] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1505.445744] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1505.445905] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1505.446267] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1505.446523] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa39a15e-ba2d-4f11-916b-9fa900b55ea2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.452225] env[61215]: DEBUG oslo_vmware.api [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Waiting for the task: (returnval){ [ 1505.452225] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5247b337-b5a0-759e-cd64-4e58223ab49d" [ 1505.452225] env[61215]: _type = "Task" [ 1505.452225] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.462498] env[61215]: DEBUG oslo_vmware.api [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5247b337-b5a0-759e-cd64-4e58223ab49d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.965403] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.966229] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1505.966724] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.032921] env[61215]: DEBUG oslo_concurrency.lockutils [None req-48c03b85-d937-406e-a729-a9d9a3ce5f6c tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Acquiring lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.837439] env[61215]: DEBUG oslo_concurrency.lockutils [None req-16cfbbd1-b5a4-4ff6-b608-afa1fa993c4c tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Acquiring lock "ad606ad3-d291-4a71-91d5-850a9795f301" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.837681] env[61215]: DEBUG oslo_concurrency.lockutils [None req-16cfbbd1-b5a4-4ff6-b608-afa1fa993c4c tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Lock "ad606ad3-d291-4a71-91d5-850a9795f301" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.084348] env[61215]: DEBUG nova.compute.manager [req-04b27dbd-a4f1-426f-bec0-9d86ecc862e2 req-e9c221cc-0f30-4cd2-9381-5b000a360301 service nova] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Received event network-changed-4d1d1f98-d8a0-49c8-932b-29460fc755b4 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1508.084687] env[61215]: DEBUG nova.compute.manager [req-04b27dbd-a4f1-426f-bec0-9d86ecc862e2 req-e9c221cc-0f30-4cd2-9381-5b000a360301 service nova] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Refreshing instance network info cache due to event network-changed-4d1d1f98-d8a0-49c8-932b-29460fc755b4.
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1508.084950] env[61215]: DEBUG oslo_concurrency.lockutils [req-04b27dbd-a4f1-426f-bec0-9d86ecc862e2 req-e9c221cc-0f30-4cd2-9381-5b000a360301 service nova] Acquiring lock "refresh_cache-fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.085142] env[61215]: DEBUG oslo_concurrency.lockutils [req-04b27dbd-a4f1-426f-bec0-9d86ecc862e2 req-e9c221cc-0f30-4cd2-9381-5b000a360301 service nova] Acquired lock "refresh_cache-fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.085403] env[61215]: DEBUG nova.network.neutron [req-04b27dbd-a4f1-426f-bec0-9d86ecc862e2 req-e9c221cc-0f30-4cd2-9381-5b000a360301 service nova] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Refreshing network info cache for port 4d1d1f98-d8a0-49c8-932b-29460fc755b4 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1508.568583] env[61215]: DEBUG nova.network.neutron [req-04b27dbd-a4f1-426f-bec0-9d86ecc862e2 req-e9c221cc-0f30-4cd2-9381-5b000a360301 service nova] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Updated VIF entry in instance network info cache for port 4d1d1f98-d8a0-49c8-932b-29460fc755b4. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1508.568891] env[61215]: DEBUG nova.network.neutron [req-04b27dbd-a4f1-426f-bec0-9d86ecc862e2 req-e9c221cc-0f30-4cd2-9381-5b000a360301 service nova] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Updating instance_info_cache with network_info: [{"id": "4d1d1f98-d8a0-49c8-932b-29460fc755b4", "address": "fa:16:3e:df:ce:18", "network": {"id": "431850f7-328a-4000-844c-49284ae74123", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1058107245-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "700177b2d3034f9e8d00afa198f3fa48", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "357d2811-e990-4985-9f9e-b158d10d3699", "external-id": "nsx-vlan-transportzone-641", "segmentation_id": 641, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d1d1f98-d8", "ovs_interfaceid": "4d1d1f98-d8a0-49c8-932b-29460fc755b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.581897] env[61215]: DEBUG oslo_concurrency.lockutils [req-04b27dbd-a4f1-426f-bec0-9d86ecc862e2 req-e9c221cc-0f30-4cd2-9381-5b000a360301 service nova] Releasing lock "refresh_cache-fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.927900] env[61215]: DEBUG oslo_concurrency.lockutils [None req-959678f1-500b-45b6-8b58-c7c2e421a8f7 tempest-ServerRescueNegativeTestJSON-558999008 
tempest-ServerRescueNegativeTestJSON-558999008-project-member] Acquiring lock "d8f5d964-fbcd-45fc-acb7-a0ca9e01b615" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.927900] env[61215]: DEBUG oslo_concurrency.lockutils [None req-959678f1-500b-45b6-8b58-c7c2e421a8f7 tempest-ServerRescueNegativeTestJSON-558999008 tempest-ServerRescueNegativeTestJSON-558999008-project-member] Lock "d8f5d964-fbcd-45fc-acb7-a0ca9e01b615" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1513.903601] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6a9817d3-fe79-4dc8-9fe4-947c54d30f8c tempest-ServerRescueNegativeTestJSON-558999008 tempest-ServerRescueNegativeTestJSON-558999008-project-member] Acquiring lock "4cdbd5ad-fbbb-4cee-811c-60cf47094cad" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.903999] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6a9817d3-fe79-4dc8-9fe4-947c54d30f8c tempest-ServerRescueNegativeTestJSON-558999008 tempest-ServerRescueNegativeTestJSON-558999008-project-member] Lock "4cdbd5ad-fbbb-4cee-811c-60cf47094cad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.339257] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b9ce1d83-442e-4660-bbcb-07ddff5976ce tempest-ListImageFiltersTestJSON-577297259 tempest-ListImageFiltersTestJSON-577297259-project-member] Acquiring lock "dfbd190d-8565-4272-8320-eef68d00b9a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1516.340265] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b9ce1d83-442e-4660-bbcb-07ddff5976ce tempest-ListImageFiltersTestJSON-577297259 tempest-ListImageFiltersTestJSON-577297259-project-member] Lock "dfbd190d-8565-4272-8320-eef68d00b9a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.357534] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0d63e8a7-bad5-45a9-8834-9e3985e0b3cd tempest-ListImageFiltersTestJSON-577297259 tempest-ListImageFiltersTestJSON-577297259-project-member] Acquiring lock "26c74fa5-69fd-4a83-9ef7-8ed4103b5460" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.357534] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0d63e8a7-bad5-45a9-8834-9e3985e0b3cd tempest-ListImageFiltersTestJSON-577297259 tempest-ListImageFiltersTestJSON-577297259-project-member] Lock "26c74fa5-69fd-4a83-9ef7-8ed4103b5460" acquired by
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.342189] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d031ed1d-bd02-4d8d-b74a-f770e7dcbf8e tempest-ServerGroupTestJSON-1949014661 tempest-ServerGroupTestJSON-1949014661-project-member] Acquiring lock "008007b0-7ff0-4711-80dc-707efea20e75" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.342497] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d031ed1d-bd02-4d8d-b74a-f770e7dcbf8e tempest-ServerGroupTestJSON-1949014661 tempest-ServerGroupTestJSON-1949014661-project-member] Lock "008007b0-7ff0-4711-80dc-707efea20e75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.221800] env[61215]: DEBUG oslo_concurrency.lockutils [None req-95b00ab7-71fb-4c3c-b578-d1cd5d96d79f tempest-ServerAddressesNegativeTestJSON-2117665619 tempest-ServerAddressesNegativeTestJSON-2117665619-project-member] Acquiring lock "a0b2bf99-e82a-4866-844b-0e5ed758e78c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.221800] env[61215]: DEBUG oslo_concurrency.lockutils [None req-95b00ab7-71fb-4c3c-b578-d1cd5d96d79f tempest-ServerAddressesNegativeTestJSON-2117665619 tempest-ServerAddressesNegativeTestJSON-2117665619-project-member] Lock "a0b2bf99-e82a-4866-844b-0e5ed758e78c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.413840] env[61215]: DEBUG oslo_concurrency.lockutils [None req-54e97152-2d35-40ea-ba8c-33890671b22d tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] Acquiring lock "953e0804-8220-4fb0-a4af-8956be949a54" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.413840] env[61215]: DEBUG oslo_concurrency.lockutils [None req-54e97152-2d35-40ea-ba8c-33890671b22d tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] Lock "953e0804-8220-4fb0-a4af-8956be949a54" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.067148] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c4f08818-3501-4caf-b419-d0f5e404a18d tempest-ServerActionsV293TestJSON-1175145830 tempest-ServerActionsV293TestJSON-1175145830-project-member] Acquiring lock "3cf6ec8a-489b-4102-9fcb-581587345fc5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1527.067443] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c4f08818-3501-4caf-b419-d0f5e404a18d tempest-ServerActionsV293TestJSON-1175145830 tempest-ServerActionsV293TestJSON-1175145830-project-member] Lock "3cf6ec8a-489b-4102-9fcb-581587345fc5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.163788] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0c7a8989-bfec-4fc2-b942-5dfa56facefd tempest-ServersNegativeTestJSON-1499202249 tempest-ServersNegativeTestJSON-1499202249-project-member] Acquiring lock "772cf4c4-cdc2-4a00-8891-908b31827a7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.163788] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0c7a8989-bfec-4fc2-b942-5dfa56facefd tempest-ServersNegativeTestJSON-1499202249 tempest-ServersNegativeTestJSON-1499202249-project-member] Lock "772cf4c4-cdc2-4a00-8891-908b31827a7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.493500] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd7ba7c5-9f1d-43a3-bd18-ae56a3b70b3b tempest-ImagesOneServerNegativeTestJSON-1147663856 tempest-ImagesOneServerNegativeTestJSON-1147663856-project-member] Acquiring lock "eb4e172e-1eb1-4e31-a311-96f772f1a196" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.493940] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd7ba7c5-9f1d-43a3-bd18-ae56a3b70b3b tempest-ImagesOneServerNegativeTestJSON-1147663856 tempest-ImagesOneServerNegativeTestJSON-1147663856-project-member] Lock "eb4e172e-1eb1-4e31-a311-96f772f1a196" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.068244] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquiring lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.068580] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.289271] env[61215]: WARNING oslo_vmware.rw_handles [None
req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1547.289271] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1547.289271] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1547.289271] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1547.289271] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1547.289271] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 1547.289271] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1547.289271] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1547.289271] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1547.289271] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1547.289271] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1547.289271] env[61215]: ERROR oslo_vmware.rw_handles [ 1547.289756] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/c17bb4cf-045e-488b-ab56-0646e6a3376a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1547.291597] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1547.291857] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Copying Virtual Disk [datastore1] vmware_temp/c17bb4cf-045e-488b-ab56-0646e6a3376a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/c17bb4cf-045e-488b-ab56-0646e6a3376a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1547.292170] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9963c5b-20e4-45e3-8145-35d8fab20abe {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.301552] env[61215]: DEBUG oslo_vmware.api [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Waiting for the task: (returnval){ [ 1547.301552] env[61215]: value = "task-1690336" [ 1547.301552] env[61215]: _type = "Task" [ 1547.301552] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.311315] env[61215]: DEBUG oslo_vmware.api [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Task: {'id': task-1690336, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.811486] env[61215]: DEBUG oslo_vmware.exceptions [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1547.811767] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.812321] env[61215]: ERROR nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1547.812321] env[61215]: Faults: ['InvalidArgument'] [ 1547.812321] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Traceback (most recent call last): [ 1547.812321] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1547.812321] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] yield resources [ 1547.812321] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1547.812321] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] self.driver.spawn(context, instance, image_meta, [ 1547.812321] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1547.812321] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1547.812321] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1547.812321] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] self._fetch_image_if_missing(context, vi) [ 1547.812321] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1547.812729] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] image_cache(vi, tmp_image_ds_loc) [ 1547.812729] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1547.812729] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] vm_util.copy_virtual_disk( [ 1547.812729] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1547.812729] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] session._wait_for_task(vmdk_copy_task) [ 1547.812729] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1547.812729] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] return self.wait_for_task(task_ref) [ 1547.812729] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1547.812729] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] return evt.wait() [ 1547.812729] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1547.812729] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] result = hub.switch() [ 1547.812729] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1547.812729] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] return self.greenlet.switch() [ 1547.813198] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1547.813198] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] self.f(*self.args, **self.kw) [ 1547.813198] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1547.813198] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] raise exceptions.translate_fault(task_info.error) [ 1547.813198] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1547.813198] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Faults: ['InvalidArgument'] [ 1547.813198] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] [ 1547.813198] env[61215]: INFO nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Terminating instance [ 1547.814202] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.814425] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1547.815043] env[61215]: DEBUG nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1547.815243] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1547.815468] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c342bd8b-1dfa-4fef-9122-6e48e6549811 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.817786] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5d7bab-1a65-4895-8bcf-e4114182f448 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.824768] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1547.824988] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1fcfdf8b-7903-41e6-a32f-fd474c9e1849 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.827258] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1547.827437] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1547.829210] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2a06e77-b03f-40bc-ae03-bfac4efcc16c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.833891] env[61215]: DEBUG oslo_vmware.api [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Waiting for the task: (returnval){ [ 1547.833891] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52172719-36b4-6da4-f182-08cf6b46c4c0" [ 1547.833891] env[61215]: _type = "Task" [ 1547.833891] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.843032] env[61215]: DEBUG oslo_vmware.api [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52172719-36b4-6da4-f182-08cf6b46c4c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.913188] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1547.913486] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1547.913733] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Deleting the datastore file [datastore1] ef0f6995-b272-4a45-a09d-5d8d38ffe23c {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1547.914035] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69d88a1e-363e-448c-944b-69a115e43a96 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.920053] env[61215]: DEBUG oslo_vmware.api [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Waiting for the task: (returnval){ [ 1547.920053] env[61215]: value = "task-1690338" [ 1547.920053] env[61215]: _type = "Task" [ 1547.920053] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.927672] env[61215]: DEBUG oslo_vmware.api [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Task: {'id': task-1690338, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.344848] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1548.345162] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Creating directory with path [datastore1] vmware_temp/4fbed500-8ada-48c1-a66c-0f7eb0745519/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1548.345162] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4158fee0-db6e-46a3-8671-a3ad242a8a60 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.356164] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Created directory with path [datastore1] vmware_temp/4fbed500-8ada-48c1-a66c-0f7eb0745519/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1548.356359] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Fetch image to [datastore1] vmware_temp/4fbed500-8ada-48c1-a66c-0f7eb0745519/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1548.356527] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/4fbed500-8ada-48c1-a66c-0f7eb0745519/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1548.357457] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a198f7ef-27a0-4f15-8343-47447a269b15 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.364292] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3643b01-79b8-449b-9d40-3c1f869c19bd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.373205] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42b671b-9d1d-4bc0-ba3a-f27f7c6f8d4e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.403400] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ad60688b-9265-4fde-bfbf-cd09df8b3b73 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.409167] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fe4d27ed-893f-48da-80e6-87dd80a394a1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.429501] env[61215]: DEBUG oslo_vmware.api [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Task: {'id': task-1690338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072495} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.431187] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1548.431398] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1548.431576] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1548.431780] env[61215]: INFO nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Took 0.62 seconds to destroy the instance on the hypervisor. 
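The image fetch logged around here streams Glance image data onto the datastore over HTTP; nova's vmwareapi images code does this through oslo_vmware.rw_handles.FileWriteHandle, and close() is the step that reads back the server's response, which is where the RemoteDisconnected warning earlier in this section surfaced. A minimal sketch under those assumptions; the host, port, and target path are illustrative placeholders (the dcPath/dsName values mirror the URL in the next entry):

    from oslo_vmware import rw_handles

    def write_image_to_datastore(cookies, chunks, size):
        # Opens the same kind of HTTPS write connection as the
        # "Creating HTTP connection to write to file" entry below.
        handle = rw_handles.FileWriteHandle(
            'esx.example.org', 443,           # illustrative host/port
            'ha-datacenter', 'datastore1',    # dcPath / dsName query args
            cookies,                          # vCenter session cookie
            'vmware_temp/example/tmp-sparse.vmdk',  # illustrative path
            size)
        for chunk in chunks:                  # stream the image data
            handle.write(chunk)
        # close() finishes the upload and reads the HTTP response; a peer
        # that drops the connection here raises RemoteDisconnected.
        handle.close()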
[ 1548.433542] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1548.435550] env[61215]: DEBUG nova.compute.claims [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1548.435715] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.435928] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1548.543491] env[61215]: DEBUG oslo_vmware.rw_handles [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4fbed500-8ada-48c1-a66c-0f7eb0745519/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1548.605484] env[61215]: DEBUG oslo_vmware.rw_handles [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1548.605683] env[61215]: DEBUG oslo_vmware.rw_handles [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4fbed500-8ada-48c1-a66c-0f7eb0745519/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1548.866845] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6447044a-10e6-4b34-a0c0-4e593e39d9c1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.873690] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07eea0a4-af9f-4c43-8465-8a4a6a04f8f3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.903827] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae383c4f-20c5-4b5b-bba4-6112404eda27 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.911117] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb154ca-856a-42b1-8c56-e76095146286 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.924056] env[61215]: DEBUG nova.compute.provider_tree [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1548.932406] env[61215]: DEBUG nova.scheduler.client.report [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1548.951124] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.515s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.951670] env[61215]: ERROR nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1548.951670] env[61215]: Faults: ['InvalidArgument'] [ 1548.951670] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Traceback (most recent call last): [ 1548.951670] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1548.951670] env[61215]: ERROR nova.compute.manager 
[instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] self.driver.spawn(context, instance, image_meta, [ 1548.951670] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1548.951670] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1548.951670] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1548.951670] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] self._fetch_image_if_missing(context, vi) [ 1548.951670] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1548.951670] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] image_cache(vi, tmp_image_ds_loc) [ 1548.951670] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1548.952031] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] vm_util.copy_virtual_disk( [ 1548.952031] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1548.952031] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] session._wait_for_task(vmdk_copy_task) [ 1548.952031] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1548.952031] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] return self.wait_for_task(task_ref) [ 1548.952031] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1548.952031] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] return evt.wait() [ 1548.952031] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1548.952031] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] result = hub.switch() [ 1548.952031] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1548.952031] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] return self.greenlet.switch() [ 1548.952031] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1548.952031] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] self.f(*self.args, **self.kw) [ 1548.952440] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1548.952440] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] raise exceptions.translate_fault(task_info.error) [ 1548.952440] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1548.952440] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Faults: ['InvalidArgument'] [ 1548.952440] env[61215]: ERROR nova.compute.manager [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] [ 1548.952440] env[61215]: DEBUG nova.compute.utils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1548.953902] env[61215]: DEBUG nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Build of instance ef0f6995-b272-4a45-a09d-5d8d38ffe23c was re-scheduled: A specified parameter was not correct: fileType [ 1548.953902] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1548.954297] env[61215]: DEBUG nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1548.954473] env[61215]: DEBUG nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1548.954630] env[61215]: DEBUG nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1548.954797] env[61215]: DEBUG nova.network.neutron [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1549.286711] env[61215]: DEBUG nova.network.neutron [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.319748] env[61215]: INFO nova.compute.manager [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Took 0.36 seconds to deallocate network for instance. [ 1549.427886] env[61215]: INFO nova.scheduler.client.report [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Deleted allocations for instance ef0f6995-b272-4a45-a09d-5d8d38ffe23c [ 1549.460127] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c3822165-819f-4a0a-95ae-6187929a5c71 tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Lock "ef0f6995-b272-4a45-a09d-5d8d38ffe23c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 300.087s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.466017] env[61215]: DEBUG oslo_concurrency.lockutils [None req-dba10e73-4705-48ac-9212-154e5afc7f6c tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Lock "ef0f6995-b272-4a45-a09d-5d8d38ffe23c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 102.598s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.466017] env[61215]: DEBUG oslo_concurrency.lockutils [None req-dba10e73-4705-48ac-9212-154e5afc7f6c tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Acquiring lock "ef0f6995-b272-4a45-a09d-5d8d38ffe23c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.466017] env[61215]: DEBUG oslo_concurrency.lockutils [None req-dba10e73-4705-48ac-9212-154e5afc7f6c tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Lock "ef0f6995-b272-4a45-a09d-5d8d38ffe23c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.466017] env[61215]: DEBUG oslo_concurrency.lockutils [None req-dba10e73-4705-48ac-9212-154e5afc7f6c tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Lock "ef0f6995-b272-4a45-a09d-5d8d38ffe23c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.466367] env[61215]: INFO nova.compute.manager [None req-dba10e73-4705-48ac-9212-154e5afc7f6c tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Terminating instance [ 1549.470113] env[61215]: DEBUG nova.compute.manager [None req-dba10e73-4705-48ac-9212-154e5afc7f6c tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1549.470113] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-dba10e73-4705-48ac-9212-154e5afc7f6c tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1549.470295] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e601803b-ba6c-43b1-b1b6-3b55f1f03655 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.475031] env[61215]: DEBUG nova.compute.manager [None req-23c3cc1d-b542-474d-b396-9df22068a017 tempest-ServersTestFqdnHostnames-1487308690 tempest-ServersTestFqdnHostnames-1487308690-project-member] [instance: 6bf85a18-78f3-4471-bdc6-b600f90e1700] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1549.482172] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde9a8dd-d59b-4515-8513-28bbc8d7de17 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.510924] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-dba10e73-4705-48ac-9212-154e5afc7f6c tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ef0f6995-b272-4a45-a09d-5d8d38ffe23c could not be found. [ 1549.511141] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-dba10e73-4705-48ac-9212-154e5afc7f6c tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1549.511307] env[61215]: INFO nova.compute.manager [None req-dba10e73-4705-48ac-9212-154e5afc7f6c tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Took 0.04 seconds to destroy the instance on the hypervisor. 
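The terminate sequence above shows a deliberate tolerance: vmops raises nova.exception.InstanceNotFound because the VM was never actually created on the backend, yet destroy logs only a warning and reports the instance destroyed 0.04 seconds later, since the desired end state (no VM on the hypervisor) already holds. A minimal sketch of that best-effort destroy pattern, assuming hypothetical session helpers find_vm_by_uuid() and unregister_and_delete() (these names are illustrative, not real Nova or oslo.vmware APIs):

```python
# Hedged sketch of the best-effort destroy pattern seen in the log above.
# Not Nova's actual code; session helper names are assumptions.
import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Raised when the hypervisor has no VM for the given UUID."""


def destroy_instance(session, instance_uuid):
    """Destroy a VM, treating 'already gone' as success."""
    try:
        vm_ref = session.find_vm_by_uuid(instance_uuid)   # assumed lookup API
        session.unregister_and_delete(vm_ref)             # assumed destroy API
    except InstanceNotFound:
        # Teardown continues: the goal state ("VM absent") is already reached.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    LOG.info("Instance destroyed (or already absent): %s", instance_uuid)
```

That same tolerance is why the subsequent records still run network deallocation and placement-allocation cleanup for the instance instead of aborting the terminate.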
[ 1549.511548] env[61215]: DEBUG oslo.service.loopingcall [None req-dba10e73-4705-48ac-9212-154e5afc7f6c tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1549.512014] env[61215]: DEBUG nova.compute.manager [None req-23c3cc1d-b542-474d-b396-9df22068a017 tempest-ServersTestFqdnHostnames-1487308690 tempest-ServersTestFqdnHostnames-1487308690-project-member] [instance: 6bf85a18-78f3-4471-bdc6-b600f90e1700] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1549.512854] env[61215]: DEBUG nova.compute.manager [-] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1549.512947] env[61215]: DEBUG nova.network.neutron [-] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1549.540585] env[61215]: DEBUG nova.network.neutron [-] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.546013] env[61215]: DEBUG oslo_concurrency.lockutils [None req-23c3cc1d-b542-474d-b396-9df22068a017 tempest-ServersTestFqdnHostnames-1487308690 tempest-ServersTestFqdnHostnames-1487308690-project-member] Lock "6bf85a18-78f3-4471-bdc6-b600f90e1700" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 235.140s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.550165] env[61215]: INFO nova.compute.manager [-] [instance: ef0f6995-b272-4a45-a09d-5d8d38ffe23c] Took 0.04 seconds to deallocate network for instance. [ 1549.559983] env[61215]: DEBUG nova.compute.manager [None req-8191627f-da3b-488d-a377-a14e8ee3ea5a tempest-ServerMetadataNegativeTestJSON-232383651 tempest-ServerMetadataNegativeTestJSON-232383651-project-member] [instance: 11f8cfbd-7fdd-4a5f-9fde-477caa043b0d] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1549.586366] env[61215]: DEBUG nova.compute.manager [None req-8191627f-da3b-488d-a377-a14e8ee3ea5a tempest-ServerMetadataNegativeTestJSON-232383651 tempest-ServerMetadataNegativeTestJSON-232383651-project-member] [instance: 11f8cfbd-7fdd-4a5f-9fde-477caa043b0d] Instance disappeared before build. 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1549.610327] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8191627f-da3b-488d-a377-a14e8ee3ea5a tempest-ServerMetadataNegativeTestJSON-232383651 tempest-ServerMetadataNegativeTestJSON-232383651-project-member] Lock "11f8cfbd-7fdd-4a5f-9fde-477caa043b0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.441s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.624020] env[61215]: DEBUG nova.compute.manager [None req-3931e79d-7ea1-4737-869b-3fcba0a35f40 tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] [instance: 01421743-cbfc-40d9-95aa-6b26422581e4] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1549.655837] env[61215]: DEBUG nova.compute.manager [None req-3931e79d-7ea1-4737-869b-3fcba0a35f40 tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] [instance: 01421743-cbfc-40d9-95aa-6b26422581e4] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1549.665318] env[61215]: DEBUG oslo_concurrency.lockutils [None req-dba10e73-4705-48ac-9212-154e5afc7f6c tempest-TenantUsagesTestJSON-617819556 tempest-TenantUsagesTestJSON-617819556-project-member] Lock "ef0f6995-b272-4a45-a09d-5d8d38ffe23c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.200s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.684194] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3931e79d-7ea1-4737-869b-3fcba0a35f40 tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] Lock "01421743-cbfc-40d9-95aa-6b26422581e4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.635s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.696146] env[61215]: DEBUG nova.compute.manager [None req-55302737-dc78-4568-b2d5-66ed6de1dc6b tempest-ServerDiagnosticsNegativeTest-473520563 tempest-ServerDiagnosticsNegativeTest-473520563-project-member] [instance: c2d2f172-9d88-4b88-8cf7-bbce01619c73] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1549.723599] env[61215]: DEBUG nova.compute.manager [None req-55302737-dc78-4568-b2d5-66ed6de1dc6b tempest-ServerDiagnosticsNegativeTest-473520563 tempest-ServerDiagnosticsNegativeTest-473520563-project-member] [instance: c2d2f172-9d88-4b88-8cf7-bbce01619c73] Instance disappeared before build. 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1549.751603] env[61215]: DEBUG oslo_concurrency.lockutils [None req-55302737-dc78-4568-b2d5-66ed6de1dc6b tempest-ServerDiagnosticsNegativeTest-473520563 tempest-ServerDiagnosticsNegativeTest-473520563-project-member] Lock "c2d2f172-9d88-4b88-8cf7-bbce01619c73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.397s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.762638] env[61215]: DEBUG nova.compute.manager [None req-8a4874dd-af8c-4bbc-b0ec-cbacc9bd0ecf tempest-ServersTestBootFromVolume-1998329874 tempest-ServersTestBootFromVolume-1998329874-project-member] [instance: 7f0ff2e7-30e3-425b-beff-061ba242981a] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1549.791350] env[61215]: DEBUG nova.compute.manager [None req-8a4874dd-af8c-4bbc-b0ec-cbacc9bd0ecf tempest-ServersTestBootFromVolume-1998329874 tempest-ServersTestBootFromVolume-1998329874-project-member] [instance: 7f0ff2e7-30e3-425b-beff-061ba242981a] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1549.813846] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8a4874dd-af8c-4bbc-b0ec-cbacc9bd0ecf tempest-ServersTestBootFromVolume-1998329874 tempest-ServersTestBootFromVolume-1998329874-project-member] Lock "7f0ff2e7-30e3-425b-beff-061ba242981a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.388s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.823560] env[61215]: DEBUG nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1549.892311] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.892564] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.894324] env[61215]: INFO nova.compute.claims [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1550.287601] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81eb888-0a5a-4e58-9b8d-934caf2877e7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.295679] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a16464da-569c-43a4-a7d0-2fffa1f04be0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.325961] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3166019-dbc9-4d1f-8aa1-5c355e4d75a9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.333446] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da33cc84-9a0f-4c87-823a-ae73b3ffc371 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.347831] env[61215]: DEBUG nova.compute.provider_tree [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1550.359167] env[61215]: DEBUG nova.scheduler.client.report [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1550.373942] env[61215]: DEBUG 
oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.481s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.374455] env[61215]: DEBUG nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1550.410069] env[61215]: DEBUG nova.compute.utils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1550.412328] env[61215]: DEBUG nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1550.412507] env[61215]: DEBUG nova.network.neutron [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1550.420693] env[61215]: DEBUG nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1550.492612] env[61215]: DEBUG nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1550.522594] env[61215]: DEBUG nova.virt.hardware [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1550.522911] env[61215]: DEBUG nova.virt.hardware [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1550.523125] env[61215]: DEBUG nova.virt.hardware [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1550.523368] env[61215]: DEBUG nova.virt.hardware [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1550.523503] env[61215]: DEBUG nova.virt.hardware [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1550.523619] env[61215]: DEBUG nova.virt.hardware [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1550.523918] env[61215]: DEBUG nova.virt.hardware [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1550.524144] env[61215]: DEBUG nova.virt.hardware [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
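The hardware lines above record Nova searching for a CPU topology that satisfies the flavor and image constraints: with no explicit limits, sockets/cores/threads each default to a 65536 maximum and the preferred topology is 0:0:0, so for the 1-vCPU m1.nano flavor the only admissible factorization is 1:1:1. A simplified illustration of that enumeration follows; this is a sketch of the idea, not nova.virt.hardware itself:

```python
# Simplified sketch (not nova.virt.hardware): enumerate every
# sockets * cores * threads factorization of the vCPU count that
# fits within the flavor/image limits, by exhaustive search.
from dataclasses import dataclass
from itertools import product


@dataclass(frozen=True)
class CPUTopology:
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Return all topologies whose product equals `vcpus` within limits."""
    return [
        CPUTopology(s, c, t)
        for s, c, t in product(range(1, vcpus + 1), repeat=3)
        if s * c * t == vcpus
        and s <= max_sockets and c <= max_cores and t <= max_threads
    ]


# For the 1-vCPU flavor in the log, exactly one topology qualifies:
assert possible_topologies(1) == [CPUTopology(1, 1, 1)]
```

The "Sorted desired topologies" record that follows is then just this candidate list ordered against the preferred (here all-zero, i.e. unconstrained) topology.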
[ 1550.524405] env[61215]: DEBUG nova.virt.hardware [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1550.524587] env[61215]: DEBUG nova.virt.hardware [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1550.524801] env[61215]: DEBUG nova.virt.hardware [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1550.525747] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa3b5b9-e351-4a1f-a948-3e7f3cd3476e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.530433] env[61215]: DEBUG nova.policy [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '945e16883d9746eb9f07beca0bdd6c93', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '69b85dab58ff4d1780ed5ec65c1ff99c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1550.538077] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76eb7541-19aa-418a-a76b-4bfbcbf3d6c1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.663140] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1551.154683] env[61215]: DEBUG nova.network.neutron [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Successfully created port: 75ae131c-3388-467b-b106-018b4dc95804 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1551.654387] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1551.654659] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1551.974325] env[61215]: DEBUG nova.compute.manager [req-c4b92941-c2bb-486f-a60e-9e7451b1442f req-3314e804-fbee-47fe-9b86-5a6e044815e4 service nova] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Received event network-vif-plugged-75ae131c-3388-467b-b106-018b4dc95804 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1551.974550] env[61215]: DEBUG oslo_concurrency.lockutils [req-c4b92941-c2bb-486f-a60e-9e7451b1442f req-3314e804-fbee-47fe-9b86-5a6e044815e4 service nova] Acquiring lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.974768] env[61215]: DEBUG oslo_concurrency.lockutils [req-c4b92941-c2bb-486f-a60e-9e7451b1442f req-3314e804-fbee-47fe-9b86-5a6e044815e4 service nova] Lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.974950] env[61215]: DEBUG oslo_concurrency.lockutils [req-c4b92941-c2bb-486f-a60e-9e7451b1442f req-3314e804-fbee-47fe-9b86-5a6e044815e4 service nova] Lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.975145] env[61215]: DEBUG nova.compute.manager [req-c4b92941-c2bb-486f-a60e-9e7451b1442f req-3314e804-fbee-47fe-9b86-5a6e044815e4 service nova] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] No waiting events found dispatching network-vif-plugged-75ae131c-3388-467b-b106-018b4dc95804 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1551.975321] env[61215]: WARNING nova.compute.manager [req-c4b92941-c2bb-486f-a60e-9e7451b1442f req-3314e804-fbee-47fe-9b86-5a6e044815e4 service nova] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Received unexpected event network-vif-plugged-75ae131c-3388-467b-b106-018b4dc95804 for instance with vm_state building and task_state spawning. 
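The "Received unexpected event" warning above is one half of Nova's external-event handshake: the spawn path registers a waiter for network-vif-plugged-<port> before plugging the VIF, and the Neutron-driven handler pops and signals that waiter. Here the event raced ahead of any registration, so the pop found nothing and the handler could only warn. A hedged sketch of that waiter-registry pattern (a hypothetical class, not Nova's InstanceEvents implementation):

```python
# Hedged sketch of the pop-event dispatch pattern visible in the log.
# Hypothetical; Nova's real InstanceEvents differs in detail.
import logging
import threading

LOG = logging.getLogger(__name__)


class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        # (instance_uuid, event_name) -> threading.Event
        self._waiters = {}

    def prepare_for_event(self, instance_uuid, event_name):
        """Called by the spawner *before* triggering the external action."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        """Called by the external-event handler; may find no waiter."""
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            # No one was waiting yet -- the event arrived early.
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
        else:
            waiter.set()
        return waiter
```

The paired acquire/release records on the "<uuid>-events" lock in the log correspond to exactly this kind of mutual exclusion around the waiter table.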
[ 1552.088187] env[61215]: DEBUG nova.network.neutron [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Successfully updated port: 75ae131c-3388-467b-b106-018b4dc95804 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1552.099290] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "refresh_cache-3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.099480] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquired lock "refresh_cache-3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.099582] env[61215]: DEBUG nova.network.neutron [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1552.173893] env[61215]: DEBUG nova.network.neutron [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1552.453190] env[61215]: DEBUG nova.network.neutron [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Updating instance_info_cache with network_info: [{"id": "75ae131c-3388-467b-b106-018b4dc95804", "address": "fa:16:3e:63:8e:43", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.141", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ae131c-33", "ovs_interfaceid": "75ae131c-3388-467b-b106-018b4dc95804", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.466652] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Releasing lock "refresh_cache-3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.466652] env[61215]: DEBUG nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Instance network_info: |[{"id": "75ae131c-3388-467b-b106-018b4dc95804", "address": "fa:16:3e:63:8e:43", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.141", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ae131c-33", "ovs_interfaceid": "75ae131c-3388-467b-b106-018b4dc95804", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1552.467351] env[61215]: DEBUG 
nova.virt.vmwareapi.vmops [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:8e:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75ae131c-3388-467b-b106-018b4dc95804', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1552.475162] env[61215]: DEBUG oslo.service.loopingcall [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1552.475963] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1552.476266] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ccf61b7-560b-4ee1-9768-903cd1ec487b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.497948] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1552.497948] env[61215]: value = "task-1690339" [ 1552.497948] env[61215]: _type = "Task" [ 1552.497948] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.506614] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690339, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.654103] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1552.654304] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1552.654431] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1552.676951] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1552.677165] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1552.677293] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1552.677443] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1552.677825] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1552.677825] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1552.677965] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1552.678079] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1552.678210] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1552.678331] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1552.678468] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1552.679154] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1552.679361] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1552.679516] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1553.008619] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690339, 'name': CreateVM_Task, 'duration_secs': 0.328714} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.008886] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1553.009720] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.009848] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.010817] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1553.010817] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0014140a-734d-4314-911b-43b3d6c84260 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.016165] env[61215]: DEBUG oslo_vmware.api [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Waiting for the task: (returnval){ [ 1553.016165] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52712824-7007-4b49-cd4e-2b33dfd556f7" [ 1553.016165] env[61215]: _type = "Task" [ 1553.016165] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.028055] env[61215]: DEBUG oslo_vmware.api [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52712824-7007-4b49-cd4e-2b33dfd556f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.527493] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.527807] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1553.528052] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.654844] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1554.429454] env[61215]: DEBUG nova.compute.manager [req-a3420921-798c-4f5d-9805-2c3a8f8c4985 req-37f077ed-b1db-4d85-a27d-77cb3155fa4e service nova] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Received event network-changed-75ae131c-3388-467b-b106-018b4dc95804 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1554.429546] env[61215]: DEBUG nova.compute.manager [req-a3420921-798c-4f5d-9805-2c3a8f8c4985 req-37f077ed-b1db-4d85-a27d-77cb3155fa4e service nova] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Refreshing instance network info cache due to event network-changed-75ae131c-3388-467b-b106-018b4dc95804. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1554.429752] env[61215]: DEBUG oslo_concurrency.lockutils [req-a3420921-798c-4f5d-9805-2c3a8f8c4985 req-37f077ed-b1db-4d85-a27d-77cb3155fa4e service nova] Acquiring lock "refresh_cache-3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.429903] env[61215]: DEBUG oslo_concurrency.lockutils [req-a3420921-798c-4f5d-9805-2c3a8f8c4985 req-37f077ed-b1db-4d85-a27d-77cb3155fa4e service nova] Acquired lock "refresh_cache-3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.430119] env[61215]: DEBUG nova.network.neutron [req-a3420921-798c-4f5d-9805-2c3a8f8c4985 req-37f077ed-b1db-4d85-a27d-77cb3155fa4e service nova] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Refreshing network info cache for port 75ae131c-3388-467b-b106-018b4dc95804 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1554.650020] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1554.678022] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1554.690673] env[61215]: DEBUG nova.network.neutron [req-a3420921-798c-4f5d-9805-2c3a8f8c4985 req-37f077ed-b1db-4d85-a27d-77cb3155fa4e service nova] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Updated VIF entry in instance network info cache for port 75ae131c-3388-467b-b106-018b4dc95804. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1554.691025] env[61215]: DEBUG nova.network.neutron [req-a3420921-798c-4f5d-9805-2c3a8f8c4985 req-37f077ed-b1db-4d85-a27d-77cb3155fa4e service nova] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Updating instance_info_cache with network_info: [{"id": "75ae131c-3388-467b-b106-018b4dc95804", "address": "fa:16:3e:63:8e:43", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.141", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75ae131c-33", "ovs_interfaceid": "75ae131c-3388-467b-b106-018b4dc95804", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.700919] env[61215]: DEBUG oslo_concurrency.lockutils [req-a3420921-798c-4f5d-9805-2c3a8f8c4985 req-37f077ed-b1db-4d85-a27d-77cb3155fa4e service nova] Releasing lock "refresh_cache-3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.653821] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1555.664547] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.664765] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.664936] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.665112] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1555.666210] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d714a56-de5a-463c-a4a2-a4ed76890263 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.675168] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51536a43-1429-4d7a-9006-3d3f9bd0c335 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.689438] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37da33cb-df09-4ae9-a220-995307f7ba18 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.695553] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec3ad7a-748e-4e31-ad4c-40daa9899f71 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.723643] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181329MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1555.723797] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.723992] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.798644] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 67068a42-eba7-4529-9ebf-43d6865362b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1555.798815] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ad40882f-de01-4bee-81dd-e91d07248d22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1555.798944] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 97dae204-f706-41b5-bf9f-b320d022b2f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1555.799083] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 82698789-4c08-453b-a973-1916d1f94af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1555.799208] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0223d7b6-12e1-4418-97f2-012ed41daa7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1555.799330] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1555.799448] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1555.799565] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d49f702b-cd29-4491-938c-0291b351ef20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1555.799682] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1555.799798] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1555.811020] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8d4665c7-67de-4ab3-a8b7-596a5e1152ce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.820968] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.830913] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 486e9745-b512-48ad-852a-166a7d63cf5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.840157] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 5791e2e4-8ec9-4a4d-9ed9-afa829cdd6da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.849378] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 2e186217-c1e1-40c6-8d84-988f35f6b93d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.858478] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ad606ad3-d291-4a71-91d5-850a9795f301 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.867881] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d8f5d964-fbcd-45fc-acb7-a0ca9e01b615 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.876909] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 4cdbd5ad-fbbb-4cee-811c-60cf47094cad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.886260] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance dfbd190d-8565-4272-8320-eef68d00b9a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.895156] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 26c74fa5-69fd-4a83-9ef7-8ed4103b5460 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.904152] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 008007b0-7ff0-4711-80dc-707efea20e75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.913300] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a0b2bf99-e82a-4866-844b-0e5ed758e78c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.922427] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 953e0804-8220-4fb0-a4af-8956be949a54 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.931298] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3cf6ec8a-489b-4102-9fcb-581587345fc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.940746] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 772cf4c4-cdc2-4a00-8891-908b31827a7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.949881] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance eb4e172e-1eb1-4e31-a311-96f772f1a196 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.959318] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1555.959553] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1555.959755] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1556.246535] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312ed6c2-7cd6-4c35-a98f-6f375e114cba {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.253846] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a951efe-d9d8-4bd3-afec-179a23756297 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.283404] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8bb7cf-0188-4164-bee6-2bda9e790221 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.290748] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbd13a2-0e2b-4082-b84b-153ae25d3b31 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.307588] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1556.316239] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1556.689381] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1556.689583] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.966s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.325052] env[61215]: WARNING oslo_vmware.rw_handles [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1594.325052] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1594.325052] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1594.325052] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1594.325052] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1594.325052] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 1594.325052] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1594.325052] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1594.325052] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1594.325052] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1594.325052] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1594.325052] env[61215]: ERROR oslo_vmware.rw_handles [ 1594.325607] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/4fbed500-8ada-48c1-a66c-0f7eb0745519/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1594.328302] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1594.328302] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 
tempest-ServerDiagnosticsV248Test-87911557-project-member] Copying Virtual Disk [datastore1] vmware_temp/4fbed500-8ada-48c1-a66c-0f7eb0745519/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/4fbed500-8ada-48c1-a66c-0f7eb0745519/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1594.328302] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f4b8cdf-6f17-4ba7-9603-090581328bae {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.336905] env[61215]: DEBUG oslo_vmware.api [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Waiting for the task: (returnval){ [ 1594.336905] env[61215]: value = "task-1690340" [ 1594.336905] env[61215]: _type = "Task" [ 1594.336905] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.344809] env[61215]: DEBUG oslo_vmware.api [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Task: {'id': task-1690340, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.848352] env[61215]: DEBUG oslo_vmware.exceptions [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Fault InvalidArgument not matched. 
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1594.848846] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.849281] env[61215]: ERROR nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1594.849281] env[61215]: Faults: ['InvalidArgument'] [ 1594.849281] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Traceback (most recent call last): [ 1594.849281] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1594.849281] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] yield resources [ 1594.849281] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1594.849281] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] self.driver.spawn(context, instance, image_meta, [ 1594.849281] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1594.849281] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1594.849281] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1594.849281] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] self._fetch_image_if_missing(context, vi) [ 1594.849281] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1594.849537] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] image_cache(vi, tmp_image_ds_loc) [ 1594.849537] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1594.849537] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] vm_util.copy_virtual_disk( [ 1594.849537] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1594.849537] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] session._wait_for_task(vmdk_copy_task) [ 1594.849537] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1594.849537] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] return self.wait_for_task(task_ref) [ 1594.849537] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1594.849537] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] return evt.wait() [ 1594.849537] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1594.849537] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] result = hub.switch() [ 1594.849537] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1594.849537] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] return self.greenlet.switch() [ 1594.849780] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1594.849780] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] self.f(*self.args, **self.kw) [ 1594.849780] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1594.849780] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] raise exceptions.translate_fault(task_info.error) [ 1594.849780] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1594.849780] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Faults: ['InvalidArgument'] [ 1594.849780] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] [ 1594.849780] env[61215]: INFO nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Terminating instance [ 1594.851253] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.851466] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1594.851708] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-9f230298-5c8b-43ea-8df8-9473767b6131 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.853859] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquiring lock "refresh_cache-ad40882f-de01-4bee-81dd-e91d07248d22" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.854081] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquired lock "refresh_cache-ad40882f-de01-4bee-81dd-e91d07248d22" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.854219] env[61215]: DEBUG nova.network.neutron [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1594.862360] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1594.862546] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1594.863785] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff06b4a2-83f1-4047-8eb1-93e430b011b1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.871789] env[61215]: DEBUG oslo_vmware.api [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Waiting for the task: (returnval){ [ 1594.871789] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52913159-bbd5-75de-09c9-970f10fb4940" [ 1594.871789] env[61215]: _type = "Task" [ 1594.871789] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.880071] env[61215]: DEBUG oslo_vmware.api [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52913159-bbd5-75de-09c9-970f10fb4940, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.888803] env[61215]: DEBUG nova.network.neutron [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1595.015493] env[61215]: DEBUG nova.network.neutron [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.023457] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Releasing lock "refresh_cache-ad40882f-de01-4bee-81dd-e91d07248d22" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.023861] env[61215]: DEBUG nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1595.024064] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1595.025225] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a266aa4-45e5-4654-8fca-cfe9acb4fd9f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.033864] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1595.034122] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85a2c6b3-5db8-48ae-ad67-2f196907db6a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.061458] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1595.061689] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Deleting contents of the VM from datastore 
datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1595.061873] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Deleting the datastore file [datastore1] ad40882f-de01-4bee-81dd-e91d07248d22 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1595.062149] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e6371ae-a3e3-4966-854d-64ce356ea1e6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.068481] env[61215]: DEBUG oslo_vmware.api [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Waiting for the task: (returnval){ [ 1595.068481] env[61215]: value = "task-1690342" [ 1595.068481] env[61215]: _type = "Task" [ 1595.068481] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.076034] env[61215]: DEBUG oslo_vmware.api [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Task: {'id': task-1690342, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.383276] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1595.383629] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Creating directory with path [datastore1] vmware_temp/55f7c45d-8f6d-46ef-b5df-ce5c006aeff6/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1595.383918] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b233fa72-dc7e-4b32-93a5-2f9c2fcc8f8a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.396394] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Created directory with path [datastore1] vmware_temp/55f7c45d-8f6d-46ef-b5df-ce5c006aeff6/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1595.396655] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Fetch image to [datastore1] vmware_temp/55f7c45d-8f6d-46ef-b5df-ce5c006aeff6/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1595.396912] env[61215]: DEBUG 
nova.virt.vmwareapi.vmops [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/55f7c45d-8f6d-46ef-b5df-ce5c006aeff6/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1595.397710] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4d4b6c-08e5-48b7-a541-61bef4cebef8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.404753] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799201fe-28e1-47c4-92d2-a62655a16d46 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.414254] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73d6425-8da5-4f72-9002-5fe7b950dea8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.444836] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070b6e21-6adc-43d8-aac1-08083b200390 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.450953] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-10978eda-75c6-402f-9b72-821bef9b72d1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.471481] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1595.532987] env[61215]: DEBUG oslo_vmware.rw_handles [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/55f7c45d-8f6d-46ef-b5df-ce5c006aeff6/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1595.592873] env[61215]: DEBUG oslo_vmware.rw_handles [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Completed reading data from the image iterator. 
{{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1595.593073] env[61215]: DEBUG oslo_vmware.rw_handles [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/55f7c45d-8f6d-46ef-b5df-ce5c006aeff6/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1595.596891] env[61215]: DEBUG oslo_vmware.api [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Task: {'id': task-1690342, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036791} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.597163] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1595.597370] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1595.597564] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1595.597739] env[61215]: INFO nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Took 0.57 seconds to destroy the instance on the hypervisor. [ 1595.597976] env[61215]: DEBUG oslo.service.loopingcall [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1595.598222] env[61215]: DEBUG nova.compute.manager [-] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Skipping network deallocation for instance since networking was not requested.
{{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1595.600461] env[61215]: DEBUG nova.compute.claims [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1595.600644] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.600882] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.939178] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404ea217-ebc5-4a2d-b793-59e25f1edb3f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.948037] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30676788-bc47-4a8a-880e-1b1b84ef5ea0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.979014] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6159a4c3-733a-491e-b410-4648cfd6d0fd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.987027] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c993082b-fd46-4e08-83ad-1dc98a287590 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.001543] env[61215]: DEBUG nova.compute.provider_tree [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1596.010392] env[61215]: DEBUG nova.scheduler.client.report [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1596.024316] env[61215]: DEBUG 
oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.423s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.024855] env[61215]: ERROR nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1596.024855] env[61215]: Faults: ['InvalidArgument'] [ 1596.024855] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Traceback (most recent call last): [ 1596.024855] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1596.024855] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] self.driver.spawn(context, instance, image_meta, [ 1596.024855] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1596.024855] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1596.024855] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1596.024855] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] self._fetch_image_if_missing(context, vi) [ 1596.024855] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1596.024855] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] image_cache(vi, tmp_image_ds_loc) [ 1596.024855] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1596.025120] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] vm_util.copy_virtual_disk( [ 1596.025120] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1596.025120] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] session._wait_for_task(vmdk_copy_task) [ 1596.025120] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1596.025120] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] return self.wait_for_task(task_ref) [ 1596.025120] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1596.025120] env[61215]: ERROR nova.compute.manager 
[instance: ad40882f-de01-4bee-81dd-e91d07248d22] return evt.wait() [ 1596.025120] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1596.025120] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] result = hub.switch() [ 1596.025120] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1596.025120] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] return self.greenlet.switch() [ 1596.025120] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1596.025120] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] self.f(*self.args, **self.kw) [ 1596.025383] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1596.025383] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] raise exceptions.translate_fault(task_info.error) [ 1596.025383] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1596.025383] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Faults: ['InvalidArgument'] [ 1596.025383] env[61215]: ERROR nova.compute.manager [instance: ad40882f-de01-4bee-81dd-e91d07248d22] [ 1596.025637] env[61215]: DEBUG nova.compute.utils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1596.026982] env[61215]: DEBUG nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Build of instance ad40882f-de01-4bee-81dd-e91d07248d22 was re-scheduled: A specified parameter was not correct: fileType [ 1596.026982] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1596.027371] env[61215]: DEBUG nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1596.027602] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquiring lock "refresh_cache-ad40882f-de01-4bee-81dd-e91d07248d22" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1596.027754] env[61215]: DEBUG oslo_concurrency.lockutils [None 
req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquired lock "refresh_cache-ad40882f-de01-4bee-81dd-e91d07248d22" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1596.027919] env[61215]: DEBUG nova.network.neutron [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1596.053028] env[61215]: DEBUG nova.network.neutron [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1596.147785] env[61215]: DEBUG nova.network.neutron [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.157758] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Releasing lock "refresh_cache-ad40882f-de01-4bee-81dd-e91d07248d22" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.157758] env[61215]: DEBUG nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1596.157758] env[61215]: DEBUG nova.compute.manager [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 1596.252186] env[61215]: INFO nova.scheduler.client.report [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Deleted allocations for instance ad40882f-de01-4bee-81dd-e91d07248d22 [ 1596.274264] env[61215]: DEBUG oslo_concurrency.lockutils [None req-13844863-8a64-436c-a38f-7cae8093c276 tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Lock "ad40882f-de01-4bee-81dd-e91d07248d22" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 331.119s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.275421] env[61215]: DEBUG oslo_concurrency.lockutils [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Lock "ad40882f-de01-4bee-81dd-e91d07248d22" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 133.656s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.275647] env[61215]: DEBUG oslo_concurrency.lockutils [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquiring lock "ad40882f-de01-4bee-81dd-e91d07248d22-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.275855] env[61215]: DEBUG oslo_concurrency.lockutils [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Lock "ad40882f-de01-4bee-81dd-e91d07248d22-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.276031] env[61215]: DEBUG oslo_concurrency.lockutils [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Lock "ad40882f-de01-4bee-81dd-e91d07248d22-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.278629] env[61215]: INFO nova.compute.manager [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Terminating instance [ 1596.279717] env[61215]: DEBUG oslo_concurrency.lockutils [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquiring lock "refresh_cache-ad40882f-de01-4bee-81dd-e91d07248d22" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1596.280018] env[61215]: DEBUG oslo_concurrency.lockutils [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557
tempest-ServerDiagnosticsV248Test-87911557-project-member] Acquired lock "refresh_cache-ad40882f-de01-4bee-81dd-e91d07248d22" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1596.280085] env[61215]: DEBUG nova.network.neutron [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1596.288969] env[61215]: DEBUG nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1596.316163] env[61215]: DEBUG nova.network.neutron [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1596.339260] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.339518] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.341360] env[61215]: INFO nova.compute.claims [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1596.394759] env[61215]: DEBUG nova.network.neutron [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.403836] env[61215]: DEBUG oslo_concurrency.lockutils [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Releasing lock "refresh_cache-ad40882f-de01-4bee-81dd-e91d07248d22" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.404044] env[61215]: DEBUG nova.compute.manager [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd 
tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1596.404625] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1596.404720] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15bdfdaa-b2cf-43aa-877c-37777ee66acf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.413398] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9066ded3-9399-4273-adab-86e9cc933645 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.445166] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ad40882f-de01-4bee-81dd-e91d07248d22 could not be found. [ 1596.445430] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1596.445619] env[61215]: INFO nova.compute.manager [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1596.445887] env[61215]: DEBUG oslo.service.loopingcall [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1596.448463] env[61215]: DEBUG nova.compute.manager [-] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1596.448559] env[61215]: DEBUG nova.network.neutron [-] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1596.466584] env[61215]: DEBUG nova.network.neutron [-] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Instance cache missing network info. 
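
The WARNING above shows the destroy path continuing even though the VM is already gone from the backend (nova.exception.InstanceNotFound), so deletes stay idempotent. A sketch of that tolerate-missing-on-delete pattern, assuming a Nova source tree is importable; the _destroy_vm_on_backend helper is hypothetical and only the exception class name comes from the log.

# Sketch of the "already gone" handling visible above: InstanceNotFound
# during destroy is treated as success so cleanup continues regardless.
# Assumes nova is importable; _destroy_vm_on_backend is a hypothetical
# stand-in for the vmwareapi destroy call.
from nova import exception

def _destroy_vm_on_backend(instance_uuid):
    # Simulate the case in this log: the backend VM no longer exists.
    raise exception.InstanceNotFound(instance_id=instance_uuid)

def destroy_instance(instance_uuid):
    try:
        _destroy_vm_on_backend(instance_uuid)
    except exception.InstanceNotFound:
        # Matches the WARNING above: nothing to tear down on the backend,
        # so proceed to network deallocation and allocation cleanup anyway.
        pass
    # ... deallocate networks, delete placement allocations, etc. ...

destroy_instance("ad40882f-de01-4bee-81dd-e91d07248d22")
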
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1596.475035] env[61215]: DEBUG nova.network.neutron [-] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.485525] env[61215]: INFO nova.compute.manager [-] [instance: ad40882f-de01-4bee-81dd-e91d07248d22] Took 0.04 seconds to deallocate network for instance. [ 1596.564789] env[61215]: DEBUG oslo_concurrency.lockutils [None req-634da85f-a5d7-4f72-9fc5-c772e74bd6bd tempest-ServerDiagnosticsV248Test-87911557 tempest-ServerDiagnosticsV248Test-87911557-project-member] Lock "ad40882f-de01-4bee-81dd-e91d07248d22" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.289s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.715583] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a91e45-225d-439d-85f5-0581a13c924a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.723323] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0120e744-a20d-4865-965a-8ec2f8f2220c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.754059] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec295f95-e352-4eb4-8a76-3fdd285e3e25 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.761131] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3429e6-bda7-4e64-8379-cd3df6938317 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.775160] env[61215]: DEBUG nova.compute.provider_tree [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1596.785442] env[61215]: DEBUG nova.scheduler.client.report [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1596.803414] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.464s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.803856] env[61215]: DEBUG nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1596.842834] env[61215]: DEBUG nova.compute.utils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1596.844457] env[61215]: DEBUG nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1596.844457] env[61215]: DEBUG nova.network.neutron [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1596.852669] env[61215]: DEBUG nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1596.902981] env[61215]: DEBUG nova.policy [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '314a93279f8a46f4bae8537445a4a79b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f93d0998de584ac899f94fb170216f55', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1596.914700] env[61215]: DEBUG nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1596.941070] env[61215]: DEBUG nova.virt.hardware [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1596.941436] env[61215]: DEBUG nova.virt.hardware [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1596.941645] env[61215]: DEBUG nova.virt.hardware [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1596.941846] env[61215]: DEBUG nova.virt.hardware [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1596.941998] env[61215]: DEBUG nova.virt.hardware [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1596.942169] env[61215]: DEBUG nova.virt.hardware [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1596.942385] env[61215]: DEBUG nova.virt.hardware [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1596.942549] env[61215]: DEBUG nova.virt.hardware [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f 
tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1596.942717] env[61215]: DEBUG nova.virt.hardware [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1596.942883] env[61215]: DEBUG nova.virt.hardware [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1596.943070] env[61215]: DEBUG nova.virt.hardware [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1596.943931] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c821161-5a8e-4356-a700-14ca17eea011 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.952479] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6cc477e-940a-4f6a-abbf-56e139e1ed5f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.390608] env[61215]: DEBUG nova.network.neutron [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Successfully created port: 0f4468e4-6132-480b-9026-237dd631523d {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1598.207590] env[61215]: DEBUG nova.compute.manager [req-050a639c-ce14-4f7d-a2b5-13faac6b5b71 req-fdcc04a9-5eee-4ccc-8aef-c8b6ed5a31cd service nova] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Received event network-vif-plugged-0f4468e4-6132-480b-9026-237dd631523d {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1598.207590] env[61215]: DEBUG oslo_concurrency.lockutils [req-050a639c-ce14-4f7d-a2b5-13faac6b5b71 req-fdcc04a9-5eee-4ccc-8aef-c8b6ed5a31cd service nova] Acquiring lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.207590] env[61215]: DEBUG oslo_concurrency.lockutils [req-050a639c-ce14-4f7d-a2b5-13faac6b5b71 req-fdcc04a9-5eee-4ccc-8aef-c8b6ed5a31cd service nova] Lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.207590] env[61215]: DEBUG 
oslo_concurrency.lockutils [req-050a639c-ce14-4f7d-a2b5-13faac6b5b71 req-fdcc04a9-5eee-4ccc-8aef-c8b6ed5a31cd service nova] Lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.208392] env[61215]: DEBUG nova.compute.manager [req-050a639c-ce14-4f7d-a2b5-13faac6b5b71 req-fdcc04a9-5eee-4ccc-8aef-c8b6ed5a31cd service nova] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] No waiting events found dispatching network-vif-plugged-0f4468e4-6132-480b-9026-237dd631523d {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1598.208713] env[61215]: WARNING nova.compute.manager [req-050a639c-ce14-4f7d-a2b5-13faac6b5b71 req-fdcc04a9-5eee-4ccc-8aef-c8b6ed5a31cd service nova] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Received unexpected event network-vif-plugged-0f4468e4-6132-480b-9026-237dd631523d for instance with vm_state building and task_state spawning. [ 1598.265428] env[61215]: DEBUG nova.network.neutron [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Successfully updated port: 0f4468e4-6132-480b-9026-237dd631523d {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1598.276715] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Acquiring lock "refresh_cache-8d4665c7-67de-4ab3-a8b7-596a5e1152ce" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.276715] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Acquired lock "refresh_cache-8d4665c7-67de-4ab3-a8b7-596a5e1152ce" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.276715] env[61215]: DEBUG nova.network.neutron [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1598.359754] env[61215]: DEBUG nova.network.neutron [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Instance cache missing network info. 
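
The event entries above show the external-event handshake: the compute manager registers interest in network-vif-plugged-<port> before plugging the VIF, the Neutron notification pops the waiter, and an event that arrives with no registered waiter is logged as unexpected, exactly as in the WARNING. A minimal plain-Python sketch of that registry (not Nova's implementation):

# Minimal sketch of the external-event handshake logged above, using a
# (instance_uuid, event_name) -> threading.Event registry. Plain Python,
# illustrative only.
import threading

_waiters = {}          # (instance_uuid, event_name) -> threading.Event
_waiters_lock = threading.Lock()

def prepare_for_event(instance_uuid, event_name):
    ev = threading.Event()
    with _waiters_lock:
        _waiters[(instance_uuid, event_name)] = ev
    return ev

def dispatch_event(instance_uuid, event_name):
    with _waiters_lock:
        ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        # No waiting events found -> "Received unexpected event", as above
        print("Received unexpected event %s for instance %s"
              % (event_name, instance_uuid))
    else:
        ev.set()

# Spawning side: ev = prepare_for_event(uuid, "network-vif-plugged-<port>")
# ... plug the VIF ... then ev.wait(timeout=300)
dispatch_event("8d4665c7-67de-4ab3-a8b7-596a5e1152ce",
               "network-vif-plugged-0f4468e4-6132-480b-9026-237dd631523d")
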
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1598.879675] env[61215]: DEBUG nova.network.neutron [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Updating instance_info_cache with network_info: [{"id": "0f4468e4-6132-480b-9026-237dd631523d", "address": "fa:16:3e:d7:fb:ff", "network": {"id": "420a2f32-042f-434d-9eb7-418346e54cf8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-563920065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93d0998de584ac899f94fb170216f55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4468e4-61", "ovs_interfaceid": "0f4468e4-6132-480b-9026-237dd631523d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.896272] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Releasing lock "refresh_cache-8d4665c7-67de-4ab3-a8b7-596a5e1152ce" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1598.896592] env[61215]: DEBUG nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Instance network_info: |[{"id": "0f4468e4-6132-480b-9026-237dd631523d", "address": "fa:16:3e:d7:fb:ff", "network": {"id": "420a2f32-042f-434d-9eb7-418346e54cf8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-563920065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93d0998de584ac899f94fb170216f55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4468e4-61", "ovs_interfaceid": "0f4468e4-6132-480b-9026-237dd631523d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1598.897015] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:fb:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c5652322-9f10-4996-baed-4c0aa13a1b4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f4468e4-6132-480b-9026-237dd631523d', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1598.905039] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Creating folder: Project (f93d0998de584ac899f94fb170216f55). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1598.906031] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3aeaaea3-1c8e-4986-bd8c-9c1fe0c2e906 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.917432] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Created folder: Project (f93d0998de584ac899f94fb170216f55) in parent group-v352463. [ 1598.917632] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Creating folder: Instances. Parent ref: group-v352518. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1598.917868] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5fd04aa0-3c1f-4bea-9c9b-5c1c8c55f7ea {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.929474] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Created folder: Instances in parent group-v352518. [ 1598.929669] env[61215]: DEBUG oslo.service.loopingcall [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1598.929855] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1598.930066] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46d11961-5a84-4ee4-91e2-c36042d632d9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.950026] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1598.950026] env[61215]: value = "task-1690345" [ 1598.950026] env[61215]: _type = "Task" [ 1598.950026] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.957801] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690345, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.472104] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690345, 'name': CreateVM_Task, 'duration_secs': 0.307875} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.472804] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1599.473217] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.473843] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1599.474011] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1599.474326] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1599.474782] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b145aee-d7cf-43d7-890d-448dbccb550c {{(pid=61215) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.481357] env[61215]: DEBUG oslo_vmware.api [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Waiting for the task: (returnval){ [ 1599.481357] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]523ac0c2-101e-1445-1948-477a1810ca16" [ 1599.481357] env[61215]: _type = "Task" [ 1599.481357] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.490276] env[61215]: DEBUG oslo_vmware.api [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]523ac0c2-101e-1445-1948-477a1810ca16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.997818] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.998232] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1599.998546] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.299177] env[61215]: DEBUG nova.compute.manager [req-437de3a3-9717-4f2a-ae7d-a0ce95b475ef req-c0f9d1f0-941e-4df0-9ecf-dbb425e1e1b8 service nova] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Received event network-changed-0f4468e4-6132-480b-9026-237dd631523d {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1600.299417] env[61215]: DEBUG nova.compute.manager [req-437de3a3-9717-4f2a-ae7d-a0ce95b475ef req-c0f9d1f0-941e-4df0-9ecf-dbb425e1e1b8 service nova] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Refreshing instance network info cache due to event network-changed-0f4468e4-6132-480b-9026-237dd631523d. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1600.299622] env[61215]: DEBUG oslo_concurrency.lockutils [req-437de3a3-9717-4f2a-ae7d-a0ce95b475ef req-c0f9d1f0-941e-4df0-9ecf-dbb425e1e1b8 service nova] Acquiring lock "refresh_cache-8d4665c7-67de-4ab3-a8b7-596a5e1152ce" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.299769] env[61215]: DEBUG oslo_concurrency.lockutils [req-437de3a3-9717-4f2a-ae7d-a0ce95b475ef req-c0f9d1f0-941e-4df0-9ecf-dbb425e1e1b8 service nova] Acquired lock "refresh_cache-8d4665c7-67de-4ab3-a8b7-596a5e1152ce" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.299954] env[61215]: DEBUG nova.network.neutron [req-437de3a3-9717-4f2a-ae7d-a0ce95b475ef req-c0f9d1f0-941e-4df0-9ecf-dbb425e1e1b8 service nova] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Refreshing network info cache for port 0f4468e4-6132-480b-9026-237dd631523d {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1601.069966] env[61215]: DEBUG nova.network.neutron [req-437de3a3-9717-4f2a-ae7d-a0ce95b475ef req-c0f9d1f0-941e-4df0-9ecf-dbb425e1e1b8 service nova] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Updated VIF entry in instance network info cache for port 0f4468e4-6132-480b-9026-237dd631523d. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1601.070350] env[61215]: DEBUG nova.network.neutron [req-437de3a3-9717-4f2a-ae7d-a0ce95b475ef req-c0f9d1f0-941e-4df0-9ecf-dbb425e1e1b8 service nova] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Updating instance_info_cache with network_info: [{"id": "0f4468e4-6132-480b-9026-237dd631523d", "address": "fa:16:3e:d7:fb:ff", "network": {"id": "420a2f32-042f-434d-9eb7-418346e54cf8", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-563920065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f93d0998de584ac899f94fb170216f55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c5652322-9f10-4996-baed-4c0aa13a1b4e", "external-id": "nsx-vlan-transportzone-941", "segmentation_id": 941, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4468e4-61", "ovs_interfaceid": "0f4468e4-6132-480b-9026-237dd631523d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1601.085464] env[61215]: DEBUG oslo_concurrency.lockutils [req-437de3a3-9717-4f2a-ae7d-a0ce95b475ef req-c0f9d1f0-941e-4df0-9ecf-dbb425e1e1b8 service nova] Releasing lock "refresh_cache-8d4665c7-67de-4ab3-a8b7-596a5e1152ce" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.261684] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 
tempest-ImagesNegativeTestJSON-1372788275-project-member] Acquiring lock "c233ab81-232d-49be-a176-bf846f0d8cc3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.261948] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Lock "c233ab81-232d-49be-a176-bf846f0d8cc3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.686750] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1612.686750] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1612.686750] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1613.655797] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1613.655990] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1614.655032] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1614.655032] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1614.655032] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1614.680049] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1614.680049] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1614.680049] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1614.680049] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1614.680049] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1614.680241] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1614.680241] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1614.680241] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1614.680241] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1614.680241] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1614.680356] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
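
The run of "Skipping network cache update ... because it is Building" entries above comes from the _heal_instance_info_cache periodic task rebuilding its worklist and filtering out instances that are not fully built. A sketch of that filtering step; the instance tuples and state strings are illustrative.

# Sketch of the worklist filtering behind the "Skipping network cache
# update" run above: only fully built instances get their info cache
# healed. Plain Python; data is illustrative.
instances = [
    ("67068a42-eba7-4529-9ebf-43d6865362b1", "building"),
    ("97dae204-f706-41b5-bf9f-b320d022b2f3", "building"),
    # a built instance would look like: ("<uuid>", "active"),
]

to_heal = []
for uuid, vm_state in instances:
    if vm_state == "building":
        print("[instance: %s] Skipping network cache update for instance "
              "because it is Building." % uuid)
        continue
    to_heal.append(uuid)

if not to_heal:
    print("Didn't find any instances for network info cache update.")
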
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1614.680623] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1614.682025] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1616.655237] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1616.655503] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1616.668132] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.668415] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.668602] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.668761] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1616.670352] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc6922e-9d97-4264-8a22-1d19e39983d0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.679121] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7df26c0-0e47-4549-aa14-3df20634bba4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.694639] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89abc0f8-3f31-4695-ba43-9664d9b12c9a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.701301] env[61215]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00afd9d-c365-43c4-a318-268c3f14947b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.729798] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181338MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1616.729956] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.730169] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.803135] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 67068a42-eba7-4529-9ebf-43d6865362b1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1616.803307] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 97dae204-f706-41b5-bf9f-b320d022b2f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1616.803434] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 82698789-4c08-453b-a973-1916d1f94af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1616.803557] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0223d7b6-12e1-4418-97f2-012ed41daa7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1616.803675] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1616.803791] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1616.803905] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d49f702b-cd29-4491-938c-0291b351ef20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1616.804028] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1616.804147] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1616.804257] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8d4665c7-67de-4ab3-a8b7-596a5e1152ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1616.815400] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.826121] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 486e9745-b512-48ad-852a-166a7d63cf5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.837708] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 5791e2e4-8ec9-4a4d-9ed9-afa829cdd6da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.848028] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 2e186217-c1e1-40c6-8d84-988f35f6b93d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.858687] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ad606ad3-d291-4a71-91d5-850a9795f301 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.870034] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d8f5d964-fbcd-45fc-acb7-a0ca9e01b615 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.880843] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 4cdbd5ad-fbbb-4cee-811c-60cf47094cad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.891751] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance dfbd190d-8565-4272-8320-eef68d00b9a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.902323] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 26c74fa5-69fd-4a83-9ef7-8ed4103b5460 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.913406] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 008007b0-7ff0-4711-80dc-707efea20e75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.924520] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a0b2bf99-e82a-4866-844b-0e5ed758e78c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.934928] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 953e0804-8220-4fb0-a4af-8956be949a54 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.946273] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3cf6ec8a-489b-4102-9fcb-581587345fc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.956336] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 772cf4c4-cdc2-4a00-8891-908b31827a7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.968744] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance eb4e172e-1eb1-4e31-a311-96f772f1a196 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.979890] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.990759] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c233ab81-232d-49be-a176-bf846f0d8cc3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1616.990999] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1616.991166] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1617.294814] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e05c35a-351a-4392-8e1b-ba8025db91a8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.302473] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae68640-1313-4915-8281-09b98563160b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.331572] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfd00c99-398c-4e01-8570-e3cb7f0493d5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.339019] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f955c616-9f8e-413f-b0c5-d733558a40e1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.352162] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1617.362133] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1617.377075] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1617.377243] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.647s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.325175] env[61215]: WARNING oslo_vmware.rw_handles [None 
req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1642.325175] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1642.325175] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1642.325175] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1642.325175] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1642.325175] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 1642.325175] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1642.325175] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1642.325175] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1642.325175] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1642.325175] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1642.325175] env[61215]: ERROR oslo_vmware.rw_handles [ 1642.325750] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/55f7c45d-8f6d-46ef-b5df-ce5c006aeff6/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1642.327618] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1642.327868] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Copying Virtual Disk [datastore1] vmware_temp/55f7c45d-8f6d-46ef-b5df-ce5c006aeff6/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/55f7c45d-8f6d-46ef-b5df-ce5c006aeff6/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1642.328167] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78854939-7b4c-417d-b98b-42b64b975374 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.337279] env[61215]: DEBUG oslo_vmware.api [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Waiting for the task: (returnval){ [ 1642.337279] env[61215]: value = "task-1690346" [ 1642.337279] env[61215]: _type = "Task" [ 1642.337279] 
env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.344694] env[61215]: DEBUG oslo_vmware.api [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Task: {'id': task-1690346, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.848736] env[61215]: DEBUG oslo_vmware.exceptions [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1642.849081] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.849589] env[61215]: ERROR nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1642.849589] env[61215]: Faults: ['InvalidArgument'] [ 1642.849589] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Traceback (most recent call last): [ 1642.849589] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1642.849589] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] yield resources [ 1642.849589] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1642.849589] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] self.driver.spawn(context, instance, image_meta, [ 1642.849589] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1642.849589] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1642.849589] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1642.849589] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] self._fetch_image_if_missing(context, vi) [ 1642.849589] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1642.849945] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] image_cache(vi, tmp_image_ds_loc) [ 1642.849945] env[61215]: ERROR 
nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1642.849945] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] vm_util.copy_virtual_disk( [ 1642.849945] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1642.849945] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] session._wait_for_task(vmdk_copy_task) [ 1642.849945] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1642.849945] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] return self.wait_for_task(task_ref) [ 1642.849945] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1642.849945] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] return evt.wait() [ 1642.849945] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1642.849945] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] result = hub.switch() [ 1642.849945] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1642.849945] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] return self.greenlet.switch() [ 1642.850287] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1642.850287] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] self.f(*self.args, **self.kw) [ 1642.850287] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1642.850287] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] raise exceptions.translate_fault(task_info.error) [ 1642.850287] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1642.850287] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Faults: ['InvalidArgument'] [ 1642.850287] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] [ 1642.850287] env[61215]: INFO nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Terminating instance [ 1642.851511] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Acquired 
lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1642.851715] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1642.851949] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d06946f-b436-4432-86b9-c4c7b1bb58ec {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.854082] env[61215]: DEBUG nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1642.854286] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1642.855010] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493158cd-1afa-4827-a147-a42352174e92 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.861613] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1642.861823] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e91f216e-d1f0-4c34-b7c6-479846506a3a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.863928] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1642.864118] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1642.865082] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a17200a-527f-4e14-83ea-ff08083d5c4f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.869487] env[61215]: DEBUG oslo_vmware.api [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Waiting for the task: (returnval){ [ 1642.869487] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5291fce0-5cf1-bc79-9942-4541587313b8" [ 1642.869487] env[61215]: _type = "Task" [ 1642.869487] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.876293] env[61215]: DEBUG oslo_vmware.api [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5291fce0-5cf1-bc79-9942-4541587313b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.934846] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1642.935087] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1642.935278] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Deleting the datastore file [datastore1] 67068a42-eba7-4529-9ebf-43d6865362b1 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1642.935544] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-065071d7-8895-4c9d-9566-32bf78097ac2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.941776] env[61215]: DEBUG oslo_vmware.api [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Waiting for the task: (returnval){ [ 1642.941776] env[61215]: value = "task-1690348" [ 1642.941776] env[61215]: _type = "Task" [ 1642.941776] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.950907] env[61215]: DEBUG oslo_vmware.api [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Task: {'id': task-1690348, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.380110] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1643.380389] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Creating directory with path [datastore1] vmware_temp/1930bddc-fb7b-4df6-ba78-8e57e88245d1/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1643.380613] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa88f267-4a8c-439d-a831-4d15c305003b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.392214] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Created directory with path [datastore1] vmware_temp/1930bddc-fb7b-4df6-ba78-8e57e88245d1/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1643.392414] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Fetch image to [datastore1] vmware_temp/1930bddc-fb7b-4df6-ba78-8e57e88245d1/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1643.392583] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/1930bddc-fb7b-4df6-ba78-8e57e88245d1/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1643.393347] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3edf2c0-f477-48b7-bba4-9f371833ac8d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.401584] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d162ea-8025-40e2-b098-22d614370dbd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.410754] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8419a62a-de72-42d4-b760-62672526c7a9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.442251] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0059f9b6-3949-4f33-b418-928ba17dccf3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.453854] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5833f1ab-2387-49d2-b021-1231559c8c02 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.455593] env[61215]: DEBUG oslo_vmware.api [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Task: {'id': task-1690348, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076577} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.455841] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1643.456030] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1643.456208] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1643.456385] env[61215]: INFO nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Took 0.60 seconds to destroy the instance on the hypervisor. 
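
The two task records above (the CopyVirtualDisk_Task that failed with InvalidArgument, and the DeleteDatastoreFile_Task that completed after 0.076s) both follow the same poll-until-terminal pattern behind the "Waiting for the task" / "progress is 0%" lines. Below is a minimal, self-contained sketch of that pattern; it is illustrative only, not oslo.vmware's actual implementation, and the fake task states are invented to mirror the failing copy task (task-1690346).

    import time

    class VimFaultException(Exception):
        """Simplified stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(poll_task_info, interval=0.5):
        """Poll a vCenter-style task until it reaches a terminal state.

        poll_task_info: callable returning a dict such as
            {'state': 'running' | 'success' | 'error', ...}
        """
        while True:
            info = poll_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # Mirrors "raise exceptions.translate_fault(task_info.error)"
                # in the traceback logged above.
                raise VimFaultException(info.get('faults', []), info['message'])
            time.sleep(interval)  # still queued/running; poll again

    # Hypothetical task that fails the way task-1690346 did:
    _states = iter([
        {'state': 'running', 'progress': 0},
        {'state': 'error', 'faults': ['InvalidArgument'],
         'message': 'A specified parameter was not correct: fileType'},
    ])
    try:
        wait_for_task(lambda: next(_states), interval=0.01)
    except VimFaultException as exc:
        print(exc, exc.fault_list)
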
[ 1643.458473] env[61215]: DEBUG nova.compute.claims [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1643.458684] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.458909] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.477574] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1643.538738] env[61215]: DEBUG oslo_vmware.rw_handles [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1930bddc-fb7b-4df6-ba78-8e57e88245d1/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1643.602345] env[61215]: DEBUG oslo_vmware.rw_handles [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1643.602534] env[61215]: DEBUG oslo_vmware.rw_handles [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1930bddc-fb7b-4df6-ba78-8e57e88245d1/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1643.884845] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e73eb2-950b-4b46-a652-3a9ecdf55b46 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.891943] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f10c94-fa2d-49ed-9c09-b5a9f25fffef {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.920652] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9d9934-a973-4742-8d9d-01ac60d2f03c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.927529] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23123c74-6b77-464f-a07e-32ec5d58ade1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.940261] env[61215]: DEBUG nova.compute.provider_tree [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1643.949459] env[61215]: DEBUG nova.scheduler.client.report [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1643.964410] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.505s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.964988] env[61215]: ERROR nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1643.964988] env[61215]: Faults: ['InvalidArgument'] [ 1643.964988] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Traceback (most recent call last): [ 1643.964988] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1643.964988] 
env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] self.driver.spawn(context, instance, image_meta, [ 1643.964988] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1643.964988] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1643.964988] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1643.964988] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] self._fetch_image_if_missing(context, vi) [ 1643.964988] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1643.964988] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] image_cache(vi, tmp_image_ds_loc) [ 1643.964988] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1643.965285] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] vm_util.copy_virtual_disk( [ 1643.965285] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1643.965285] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] session._wait_for_task(vmdk_copy_task) [ 1643.965285] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1643.965285] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] return self.wait_for_task(task_ref) [ 1643.965285] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1643.965285] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] return evt.wait() [ 1643.965285] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1643.965285] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] result = hub.switch() [ 1643.965285] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1643.965285] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] return self.greenlet.switch() [ 1643.965285] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1643.965285] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] self.f(*self.args, **self.kw) [ 1643.965572] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1643.965572] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] raise exceptions.translate_fault(task_info.error) [ 1643.965572] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1643.965572] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Faults: ['InvalidArgument'] [ 1643.965572] env[61215]: ERROR nova.compute.manager [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] [ 1643.965746] env[61215]: DEBUG nova.compute.utils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1643.967239] env[61215]: DEBUG nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Build of instance 67068a42-eba7-4529-9ebf-43d6865362b1 was re-scheduled: A specified parameter was not correct: fileType [ 1643.967239] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1643.967926] env[61215]: DEBUG nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1643.967926] env[61215]: DEBUG nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1643.967926] env[61215]: DEBUG nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1643.968254] env[61215]: DEBUG nova.network.neutron [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1644.404933] env[61215]: DEBUG nova.network.neutron [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.419030] env[61215]: INFO nova.compute.manager [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Took 0.45 seconds to deallocate network for instance. [ 1644.533023] env[61215]: INFO nova.scheduler.client.report [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Deleted allocations for instance 67068a42-eba7-4529-9ebf-43d6865362b1 [ 1644.555269] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f137aaf-b9f8-4f37-bb9a-426eb265e993 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Lock "67068a42-eba7-4529-9ebf-43d6865362b1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 384.379s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.556627] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a82dff54-aca3-42a3-9ae1-818021b01ae2 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Lock "67068a42-eba7-4529-9ebf-43d6865362b1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 187.104s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.556853] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a82dff54-aca3-42a3-9ae1-818021b01ae2 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Acquiring lock "67068a42-eba7-4529-9ebf-43d6865362b1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.557072] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a82dff54-aca3-42a3-9ae1-818021b01ae2 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Lock "67068a42-eba7-4529-9ebf-43d6865362b1-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.557243] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a82dff54-aca3-42a3-9ae1-818021b01ae2 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Lock "67068a42-eba7-4529-9ebf-43d6865362b1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.559354] env[61215]: INFO nova.compute.manager [None req-a82dff54-aca3-42a3-9ae1-818021b01ae2 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Terminating instance [ 1644.561184] env[61215]: DEBUG nova.compute.manager [None req-a82dff54-aca3-42a3-9ae1-818021b01ae2 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1644.561553] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a82dff54-aca3-42a3-9ae1-818021b01ae2 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1644.561871] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6365106d-f515-4931-8c9f-046dfb5b447b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.566335] env[61215]: DEBUG nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1644.578017] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fab53e7-ed5f-482b-92b7-fdbb2669416a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.605750] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-a82dff54-aca3-42a3-9ae1-818021b01ae2 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 67068a42-eba7-4529-9ebf-43d6865362b1 could not be found. 
[ 1644.605981] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a82dff54-aca3-42a3-9ae1-818021b01ae2 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1644.606185] env[61215]: INFO nova.compute.manager [None req-a82dff54-aca3-42a3-9ae1-818021b01ae2 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1644.606436] env[61215]: DEBUG oslo.service.loopingcall [None req-a82dff54-aca3-42a3-9ae1-818021b01ae2 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1644.607288] env[61215]: DEBUG nova.compute.manager [-] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1644.607390] env[61215]: DEBUG nova.network.neutron [-] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1644.624774] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.624774] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1644.625238] env[61215]: INFO nova.compute.claims [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1644.637079] env[61215]: DEBUG nova.network.neutron [-] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.651086] env[61215]: INFO nova.compute.manager [-] [instance: 67068a42-eba7-4529-9ebf-43d6865362b1] Took 0.04 seconds to deallocate network for instance.
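
The "Inventory has not changed" records around the claim above keep repeating the node's inventory, and the usable capacity placement derives from it per resource class is (total - reserved) * allocation_ratio, which is why 10 allocated VCPUs against 48 physical ones is unremarkable. A quick check of that arithmetic, using the figures from the log records themselves:

    # Capacity arithmetic behind the "Inventory has not changed" records.
    # Placement treats (total - reserved) * allocation_ratio as the
    # schedulable capacity per resource class; figures copied from the log.
    INVENTORY = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def capacity(inventory):
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inventory.items()}

    print(capacity(INVENTORY))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
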
[ 1644.750714] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a82dff54-aca3-42a3-9ae1-818021b01ae2 tempest-ServerExternalEventsTest-1297225472 tempest-ServerExternalEventsTest-1297225472-project-member] Lock "67068a42-eba7-4529-9ebf-43d6865362b1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.194s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.006282] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e84e77-8ec2-4c12-816f-0384d5a177d3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.014246] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978416ed-1c82-47f3-b1c4-31ca83577004 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.045870] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b149d8ba-b49c-46d6-b513-b65733f69b45 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.053113] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4cce6b-bb84-4af6-b2f5-004f0e71fd6f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.066211] env[61215]: DEBUG nova.compute.provider_tree [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1645.075558] env[61215]: DEBUG nova.scheduler.client.report [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1645.087991] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.464s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.088482] env[61215]: DEBUG nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Start building networks asynchronously for instance.
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1645.123423] env[61215]: DEBUG nova.compute.utils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1645.125064] env[61215]: DEBUG nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1645.125155] env[61215]: DEBUG nova.network.neutron [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1645.133332] env[61215]: DEBUG nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1645.195710] env[61215]: DEBUG nova.policy [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9db5492250b426c80f611d7a5686c1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3eac98da0cb41cbad12d92e9151b143', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1645.198758] env[61215]: DEBUG nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1645.225229] env[61215]: DEBUG nova.virt.hardware [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1645.225492] env[61215]: DEBUG nova.virt.hardware [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1645.225656] env[61215]: DEBUG nova.virt.hardware [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1645.225841] env[61215]: DEBUG nova.virt.hardware [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1645.225991] env[61215]: DEBUG nova.virt.hardware [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1645.226157] env[61215]: DEBUG nova.virt.hardware [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1645.226367] env[61215]: DEBUG nova.virt.hardware [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1645.226532] env[61215]: DEBUG nova.virt.hardware [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1645.226832] env[61215]: DEBUG nova.virt.hardware [None 
req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1645.227066] env[61215]: DEBUG nova.virt.hardware [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1645.227311] env[61215]: DEBUG nova.virt.hardware [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1645.228967] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966c87b6-b672-4e08-b9d3-0c96d6469bf0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.236303] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e596088-8dbe-4ef9-9b7c-709b92418795 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.813138] env[61215]: DEBUG nova.network.neutron [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Successfully created port: cac46034-bcff-4025-81ad-d36c3ce0386a {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1646.728155] env[61215]: DEBUG nova.compute.manager [req-5ce47a37-b51e-4a95-8bd6-a4875d6df233 req-58ed96c0-7bc1-4852-bc08-559ec47fa9fc service nova] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Received event network-vif-plugged-cac46034-bcff-4025-81ad-d36c3ce0386a {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1646.728383] env[61215]: DEBUG oslo_concurrency.lockutils [req-5ce47a37-b51e-4a95-8bd6-a4875d6df233 req-58ed96c0-7bc1-4852-bc08-559ec47fa9fc service nova] Acquiring lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.728597] env[61215]: DEBUG oslo_concurrency.lockutils [req-5ce47a37-b51e-4a95-8bd6-a4875d6df233 req-58ed96c0-7bc1-4852-bc08-559ec47fa9fc service nova] Lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.728821] env[61215]: DEBUG oslo_concurrency.lockutils [req-5ce47a37-b51e-4a95-8bd6-a4875d6df233 req-58ed96c0-7bc1-4852-bc08-559ec47fa9fc service nova] Lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.729022] env[61215]: DEBUG nova.compute.manager
[req-5ce47a37-b51e-4a95-8bd6-a4875d6df233 req-58ed96c0-7bc1-4852-bc08-559ec47fa9fc service nova] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] No waiting events found dispatching network-vif-plugged-cac46034-bcff-4025-81ad-d36c3ce0386a {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1646.729630] env[61215]: WARNING nova.compute.manager [req-5ce47a37-b51e-4a95-8bd6-a4875d6df233 req-58ed96c0-7bc1-4852-bc08-559ec47fa9fc service nova] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Received unexpected event network-vif-plugged-cac46034-bcff-4025-81ad-d36c3ce0386a for instance with vm_state building and task_state spawning. [ 1646.880275] env[61215]: DEBUG nova.network.neutron [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Successfully updated port: cac46034-bcff-4025-81ad-d36c3ce0386a {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1646.893946] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "refresh_cache-c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.894160] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "refresh_cache-c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.894352] env[61215]: DEBUG nova.network.neutron [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1646.981518] env[61215]: DEBUG nova.network.neutron [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Instance cache missing network info. 
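The nova.virt.hardware topology entries earlier in this build amount to a brute-force search: every (sockets, cores, threads) triple whose product matches the flavor's vCPU count and stays inside the 65536-per-dimension limits is a candidate, which for this one-vCPU m1.nano collapses to the single 1:1:1 topology. A simplified sketch of that enumeration (not Nova's actual implementation, which also weighs flavor and image preferences):

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Yield every exact factorization of the vCPU count within the limits.
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]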
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1647.293582] env[61215]: DEBUG nova.network.neutron [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Updating instance_info_cache with network_info: [{"id": "cac46034-bcff-4025-81ad-d36c3ce0386a", "address": "fa:16:3e:bf:c0:72", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac46034-bc", "ovs_interfaceid": "cac46034-bcff-4025-81ad-d36c3ce0386a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.310526] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "refresh_cache-c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.310834] env[61215]: DEBUG nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Instance network_info: |[{"id": "cac46034-bcff-4025-81ad-d36c3ce0386a", "address": "fa:16:3e:bf:c0:72", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac46034-bc", "ovs_interfaceid": "cac46034-bcff-4025-81ad-d36c3ce0386a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 1647.311289] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:c0:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cac46034-bcff-4025-81ad-d36c3ce0386a', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1647.319407] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating folder: Project (d3eac98da0cb41cbad12d92e9151b143). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1647.320030] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-504fbccb-85bc-4b01-9b2e-98caf7042f27 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.332808] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Created folder: Project (d3eac98da0cb41cbad12d92e9151b143) in parent group-v352463. [ 1647.332997] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating folder: Instances. Parent ref: group-v352521. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1647.333236] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45e5ecab-f5e0-4d7b-b2fd-d6e8f1c0d744 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.344690] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Created folder: Instances in parent group-v352521. [ 1647.344938] env[61215]: DEBUG oslo.service.loopingcall [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1647.345313] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1647.345587] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fbf2cab2-3eff-4eb1-afd5-78627582ad4d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.366580] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1647.366580] env[61215]: value = "task-1690351" [ 1647.366580] env[61215]: _type = "Task" [ 1647.366580] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.375943] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690351, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.876823] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690351, 'name': CreateVM_Task, 'duration_secs': 0.312083} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.877023] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1647.877695] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.877893] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.878186] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1647.878429] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d4b0209-1dd8-49a9-a7ef-287c6c129d64 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.882766] env[61215]: DEBUG oslo_vmware.api [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 1647.882766] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52272474-09c0-40ac-30d3-83c33b1d39fc" [ 1647.882766] env[61215]: _type = "Task" [ 1647.882766] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.890762] env[61215]: DEBUG oslo_vmware.api [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52272474-09c0-40ac-30d3-83c33b1d39fc, 'name': SearchDatastore_Task} progress is 0%. 
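The wait_for_task entries show the polling shape oslo.vmware uses: fetch the vSphere task state on a fixed interval, log progress, return the result on success, raise on error (CreateVM_Task above went from 0% to done in about 0.31s). A generic sketch of such a loop, where poll_task_info is a hypothetical callable standing in for the real TaskInfo fetch:

import time

def wait_for_task(poll_task_info, interval=0.5):
    while True:
        info = poll_task_info()
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # queued/running: report progress and try again
        print('progress is %s%%' % info.get('progress', 0))
        time.sleep(interval)

print(wait_for_task(lambda: {'state': 'success', 'result': 'task-1690351'}))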
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.394013] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.394322] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1648.394511] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1648.889836] env[61215]: DEBUG nova.compute.manager [req-c87a301c-c349-4a85-8661-a13e93bc4518 req-c9032b8e-1cc9-4a5a-960b-22f20f3b07c2 service nova] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Received event network-changed-cac46034-bcff-4025-81ad-d36c3ce0386a {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1648.890125] env[61215]: DEBUG nova.compute.manager [req-c87a301c-c349-4a85-8661-a13e93bc4518 req-c9032b8e-1cc9-4a5a-960b-22f20f3b07c2 service nova] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Refreshing instance network info cache due to event network-changed-cac46034-bcff-4025-81ad-d36c3ce0386a. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1648.890354] env[61215]: DEBUG oslo_concurrency.lockutils [req-c87a301c-c349-4a85-8661-a13e93bc4518 req-c9032b8e-1cc9-4a5a-960b-22f20f3b07c2 service nova] Acquiring lock "refresh_cache-c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1648.890501] env[61215]: DEBUG oslo_concurrency.lockutils [req-c87a301c-c349-4a85-8661-a13e93bc4518 req-c9032b8e-1cc9-4a5a-960b-22f20f3b07c2 service nova] Acquired lock "refresh_cache-c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1648.890682] env[61215]: DEBUG nova.network.neutron [req-c87a301c-c349-4a85-8661-a13e93bc4518 req-c9032b8e-1cc9-4a5a-960b-22f20f3b07c2 service nova] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Refreshing network info cache for port cac46034-bcff-4025-81ad-d36c3ce0386a {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1649.437989] env[61215]: DEBUG nova.network.neutron [req-c87a301c-c349-4a85-8661-a13e93bc4518 req-c9032b8e-1cc9-4a5a-960b-22f20f3b07c2 service nova] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Updated VIF entry in instance network info cache for port cac46034-bcff-4025-81ad-d36c3ce0386a. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1649.438410] env[61215]: DEBUG nova.network.neutron [req-c87a301c-c349-4a85-8661-a13e93bc4518 req-c9032b8e-1cc9-4a5a-960b-22f20f3b07c2 service nova] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Updating instance_info_cache with network_info: [{"id": "cac46034-bcff-4025-81ad-d36c3ce0386a", "address": "fa:16:3e:bf:c0:72", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcac46034-bc", "ovs_interfaceid": "cac46034-bcff-4025-81ad-d36c3ce0386a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1649.448086] env[61215]: DEBUG oslo_concurrency.lockutils [req-c87a301c-c349-4a85-8661-a13e93bc4518 req-c9032b8e-1cc9-4a5a-960b-22f20f3b07c2 service nova] Releasing lock "refresh_cache-c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1651.329957] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3e71cd7a-4acb-41f0-b94e-c60ace9c155c tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Acquiring lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.806127] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "bb56c470-9f85-44b1-b1ec-f44236e9de51" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.806127] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "bb56c470-9f85-44b1-b1ec-f44236e9de51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.477123] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18546486-f39c-4bd7-b349-94556da42d67 tempest-VolumesAdminNegativeTest-835700900
tempest-VolumesAdminNegativeTest-835700900-project-member] Acquiring lock "d0a5229f-8da2-40bb-af99-28f32923892f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.477430] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18546486-f39c-4bd7-b349-94556da42d67 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Lock "d0a5229f-8da2-40bb-af99-28f32923892f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.379075] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1672.650238] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1673.655322] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1673.655322] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping...
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1674.654608] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1674.654860] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1674.655052] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1675.657155] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1675.657444] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1675.657444] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1675.681036] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1675.681036] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1675.681036] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1675.681036] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1675.682086] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1675.682427] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1675.682427] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1675.682526] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1675.682647] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1675.682764] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1675.682883] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
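The cache-healing pass above rebuilds its candidate list and drops every instance still in the Building state, so with all ten instances mid-build there is nothing left to refresh. The filter reduces to this (sketch; field names illustrative):

# Instances still building are skipped; only the rest get a refresh.
instances = [
    {'uuid': '97dae204-f706-41b5-bf9f-b320d022b2f3', 'vm_state': 'building'},
    {'uuid': 'c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9', 'vm_state': 'building'},
]
to_heal = [i for i in instances if i['vm_state'] != 'building']
if not to_heal:
    print("Didn't find any instances for network info cache update.")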
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1676.655836] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1676.681113] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1678.654601] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1678.669631] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.669631] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.669631] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.669793] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1678.670912] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ec43b8-d19d-4a03-ad5d-e15af2f84ef3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.683595] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff499287-42e6-4128-aa10-c22266a19e8a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.708485] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a77d94-adde-464e-9060-a90d62f9aaf3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.718855] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf42a2ad-ad44-4f43-b1b0-377800e297f0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.728452] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ed62679c-a7c6-4d81-8ec7-280920a68661 
tempest-ServerActionsTestJSON-690131439 tempest-ServerActionsTestJSON-690131439-project-member] Acquiring lock "ddaa12c8-88c6-4ba0-beec-cad92acd9768" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.728692] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ed62679c-a7c6-4d81-8ec7-280920a68661 tempest-ServerActionsTestJSON-690131439 tempest-ServerActionsTestJSON-690131439-project-member] Lock "ddaa12c8-88c6-4ba0-beec-cad92acd9768" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.759995] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181323MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1678.759995] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.759995] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.854933] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 97dae204-f706-41b5-bf9f-b320d022b2f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1678.855219] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 82698789-4c08-453b-a973-1916d1f94af6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1678.855508] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0223d7b6-12e1-4418-97f2-012ed41daa7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1678.855674] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}.
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1678.855963] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1678.856157] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d49f702b-cd29-4491-938c-0291b351ef20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1678.856250] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1678.856749] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1678.856749] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8d4665c7-67de-4ab3-a8b7-596a5e1152ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1678.856749] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1678.869469] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 486e9745-b512-48ad-852a-166a7d63cf5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1678.881156] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 5791e2e4-8ec9-4a4d-9ed9-afa829cdd6da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1678.894513] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 2e186217-c1e1-40c6-8d84-988f35f6b93d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1678.910989] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ad606ad3-d291-4a71-91d5-850a9795f301 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1678.922726] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d8f5d964-fbcd-45fc-acb7-a0ca9e01b615 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1678.931300] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 4cdbd5ad-fbbb-4cee-811c-60cf47094cad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1678.943164] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance dfbd190d-8565-4272-8320-eef68d00b9a1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1678.954549] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 26c74fa5-69fd-4a83-9ef7-8ed4103b5460 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1678.968033] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 008007b0-7ff0-4711-80dc-707efea20e75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1678.978647] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a0b2bf99-e82a-4866-844b-0e5ed758e78c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1678.989597] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 953e0804-8220-4fb0-a4af-8956be949a54 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1679.001871] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3cf6ec8a-489b-4102-9fcb-581587345fc5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1679.019688] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 772cf4c4-cdc2-4a00-8891-908b31827a7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1679.031983] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance eb4e172e-1eb1-4e31-a311-96f772f1a196 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1679.043764] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1679.053129] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c233ab81-232d-49be-a176-bf846f0d8cc3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
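The audit entries in this stretch tally ten actively managed instances, each holding {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, plus a queue of scheduled-but-not-started instances whose allocations are left alone. The final resource view reported just below is consistent with that arithmetic:

# Cross-check of the "Final resource view" figures a few entries below.
reserved_mb, instances = 512, 10        # reserved RAM from inventory; active claims
flavor_ram_mb, flavor_disk_gb = 128, 1  # m1.nano, per the flavor logged earlier
used_ram = reserved_mb + instances * flavor_ram_mb   # 1792 MB
used_disk = instances * flavor_disk_gb               # 10 GB
used_vcpus = instances                               # 10
print(used_ram, used_disk, used_vcpus)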
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1679.066300] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bb56c470-9f85-44b1-b1ec-f44236e9de51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1679.077549] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d0a5229f-8da2-40bb-af99-28f32923892f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1679.094424] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ddaa12c8-88c6-4ba0-beec-cad92acd9768 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1679.094525] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1679.094628] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1679.531194] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-812c2c34-440d-4dad-8c05-5c6565637c37 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.538816] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fae9b7f-ba94-493c-a359-75b7b9fa9a92 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.571710] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4253efe-8591-412d-9c7a-d6a247e91927 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.580100] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e564d2-ed20-400f-a8e4-4afa81731972 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.593624] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 
1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1679.605413] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1679.628061] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1679.628061] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.865s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.545463] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b25645e4-d0d2-4489-9385-fab3f982d51b tempest-ServerRescueTestJSON-1345079088 tempest-ServerRescueTestJSON-1345079088-project-member] Acquiring lock "a17827ee-8ad2-459b-ba7d-f9f9be429e64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.548729] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b25645e4-d0d2-4489-9385-fab3f982d51b tempest-ServerRescueTestJSON-1345079088 tempest-ServerRescueTestJSON-1345079088-project-member] Lock "a17827ee-8ad2-459b-ba7d-f9f9be429e64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.914600] env[61215]: DEBUG oslo_concurrency.lockutils [None req-941ddaa9-12ca-4945-bfe6-ebc4cd0cab74 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "96a536ea-a1c7-470e-8873-bc1e723efefa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.914968] env[61215]: DEBUG oslo_concurrency.lockutils [None req-941ddaa9-12ca-4945-bfe6-ebc4cd0cab74 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "96a536ea-a1c7-470e-8873-bc1e723efefa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.123589] env[61215]: WARNING oslo_vmware.rw_handles [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Error occurred while reading the HTTP 
response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1691.123589] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1691.123589] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1691.123589] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1691.123589] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1691.123589] env[61215]: ERROR oslo_vmware.rw_handles response.begin()
[ 1691.123589] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1691.123589] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1691.123589] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1691.123589] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1691.123589] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1691.123589] env[61215]: ERROR oslo_vmware.rw_handles
[ 1691.123589] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/1930bddc-fb7b-4df6-ba78-8e57e88245d1/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1691.127177] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1691.127177] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Copying Virtual Disk [datastore1] vmware_temp/1930bddc-fb7b-4df6-ba78-8e57e88245d1/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/1930bddc-fb7b-4df6-ba78-8e57e88245d1/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1691.127177] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be24a1f2-81aa-44d8-8a59-507e3fe499e5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1691.135444] env[61215]: DEBUG oslo_vmware.api [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Waiting for the task: (returnval){
[ 1691.135444] env[61215]: value = "task-1690352"
[ 1691.135444] env[61215]: _type = "Task"
[ 1691.135444] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1691.143930] env[61215]: DEBUG oslo_vmware.api [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Task: {'id': task-1690352, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1691.646287] env[61215]: DEBUG oslo_vmware.exceptions [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1691.646287] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1691.646735] env[61215]: ERROR nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1691.646735] env[61215]: Faults: ['InvalidArgument']
[ 1691.646735] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Traceback (most recent call last):
[ 1691.646735] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 1691.646735] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] yield resources
[ 1691.646735] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1691.646735] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] self.driver.spawn(context, instance, image_meta,
[ 1691.646735] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1691.646735] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1691.646735] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1691.646735] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] self._fetch_image_if_missing(context, vi)
[ 1691.646735] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1691.647061] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] image_cache(vi, tmp_image_ds_loc)
[ 1691.647061] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1691.647061] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] vm_util.copy_virtual_disk(
[ 1691.647061] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1691.647061] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] session._wait_for_task(vmdk_copy_task)
[ 1691.647061] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1691.647061] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] return self.wait_for_task(task_ref)
[ 1691.647061] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1691.647061] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] return evt.wait()
[ 1691.647061] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1691.647061] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] result = hub.switch()
[ 1691.647061] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1691.647061] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] return self.greenlet.switch()
[ 1691.647445] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1691.647445] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] self.f(*self.args, **self.kw)
[ 1691.647445] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1691.647445] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] raise exceptions.translate_fault(task_info.error)
[ 1691.647445] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1691.647445] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Faults: ['InvalidArgument']
[ 1691.647445] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3]
[ 1691.647445] env[61215]: INFO nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Terminating instance
[ 1691.648758] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Acquired lock "[datastore1]
devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.648964] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1691.651464] env[61215]: DEBUG nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1691.651464] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1691.651464] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ce2dd8b-1cfd-4886-9b7a-b90a80a6f391 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.654195] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d32e17a-a057-464d-b143-d5413605001d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.661189] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1691.661189] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-444a12cf-2e34-4d37-9468-44b97690127e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.662952] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1691.663139] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1691.664081] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29766478-7ab6-4f87-85ec-f34b17fcc114 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1691.668707] env[61215]: DEBUG oslo_vmware.api [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Waiting for the task: (returnval){
[ 1691.668707] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52121fc3-a60e-22a7-befd-9e4a96b1e37a"
[ 1691.668707] env[61215]: _type = "Task"
[ 1691.668707] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1691.675561] env[61215]: DEBUG oslo_vmware.api [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52121fc3-a60e-22a7-befd-9e4a96b1e37a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1691.725640] env[61215]: DEBUG oslo_concurrency.lockutils [None req-93043f15-32f4-44d4-9654-69a75a32cad9 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1692.181023] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1692.181023] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Creating directory with path [datastore1] vmware_temp/00c7e34a-aca4-4eb2-b284-c6d646988f0b/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1692.181023] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ecc8b379-7e84-441a-bdcc-00fa28ba1c59 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1692.200333] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Created directory with path [datastore1] vmware_temp/00c7e34a-aca4-4eb2-b284-c6d646988f0b/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1692.200536] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance:
82698789-4c08-453b-a973-1916d1f94af6] Fetch image to [datastore1] vmware_temp/00c7e34a-aca4-4eb2-b284-c6d646988f0b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1692.200710] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/00c7e34a-aca4-4eb2-b284-c6d646988f0b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1692.201500] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2a713b-30e2-445b-b842-a9d435ce70c5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.208246] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a20069-ad96-4a95-9665-a9ce3922ca25 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.217478] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e44403-2e55-4f74-9987-540801c27141 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.248496] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80cf227d-a547-4f1f-9a57-413c9e421eb7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.254462] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f8e2c3d8-4b02-46d0-9795-498325f2e309 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.283551] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1692.332831] env[61215]: DEBUG oslo_vmware.rw_handles [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/00c7e34a-aca4-4eb2-b284-c6d646988f0b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1692.394221] env[61215]: DEBUG oslo_vmware.rw_handles [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Completed reading data from the image iterator. 
{{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 1692.394431] env[61215]: DEBUG oslo_vmware.rw_handles [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/00c7e34a-aca4-4eb2-b284-c6d646988f0b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 1692.674121] env[61215]: DEBUG oslo_concurrency.lockutils [None req-353adf5e-a541-4e36-8412-52317d627ef9 tempest-AttachVolumeShelveTestJSON-1147171237 tempest-AttachVolumeShelveTestJSON-1147171237-project-member] Acquiring lock "40fe7cd9-7c99-4add-a2eb-429ff2aba7a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1692.674376] env[61215]: DEBUG oslo_concurrency.lockutils [None req-353adf5e-a541-4e36-8412-52317d627ef9 tempest-AttachVolumeShelveTestJSON-1147171237 tempest-AttachVolumeShelveTestJSON-1147171237-project-member] Lock "40fe7cd9-7c99-4add-a2eb-429ff2aba7a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1693.346626] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1693.346960] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1693.347038] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Deleting the datastore file [datastore1] 97dae204-f706-41b5-bf9f-b320d022b2f3 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1693.347391] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73692294-4bd9-427a-8f44-b1083aa40736 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1693.353915] env[61215]: DEBUG oslo_vmware.api [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Waiting for the task: (returnval){
[ 1693.353915] env[61215]: value = "task-1690354"
[ 1693.353915] env[61215]: _type = "Task"
[ 1693.353915] env[61215]: } to complete.
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.361635] env[61215]: DEBUG oslo_vmware.api [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Task: {'id': task-1690354, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.865165] env[61215]: DEBUG oslo_vmware.api [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Task: {'id': task-1690354, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097012} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.865165] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1693.867893] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1693.867893] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1693.867893] env[61215]: INFO nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Took 2.22 seconds to destroy the instance on the hypervisor. 
[ 1693.874020] env[61215]: DEBUG nova.compute.claims [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1693.874020] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.874020] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.381143] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91ae136-8093-4bf5-bbd4-bedad7245bd1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.388780] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cacf527a-2653-4c4f-875d-5c771b82bc17 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.419620] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16408305-1ffd-4fff-8098-c3361f7e273e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.426924] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0745db73-7f52-400d-a92d-5dee21a4c821 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.441812] env[61215]: DEBUG nova.compute.provider_tree [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1694.450666] env[61215]: DEBUG nova.scheduler.client.report [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1694.469316] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 
tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.597s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1694.469849] env[61215]: ERROR nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1694.469849] env[61215]: Faults: ['InvalidArgument']
[ 1694.469849] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Traceback (most recent call last):
[ 1694.469849] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1694.469849] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] self.driver.spawn(context, instance, image_meta,
[ 1694.469849] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1694.469849] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1694.469849] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1694.469849] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] self._fetch_image_if_missing(context, vi)
[ 1694.469849] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1694.469849] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] image_cache(vi, tmp_image_ds_loc)
[ 1694.469849] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1694.470286] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] vm_util.copy_virtual_disk(
[ 1694.470286] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1694.470286] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] session._wait_for_task(vmdk_copy_task)
[ 1694.470286] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1694.470286] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] return self.wait_for_task(task_ref)
[ 1694.470286] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1694.470286] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] return evt.wait()
[ 1694.470286] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1694.470286] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] result = hub.switch()
[ 1694.470286] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1694.470286] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] return self.greenlet.switch()
[ 1694.470286] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1694.470286] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] self.f(*self.args, **self.kw)
[ 1694.470590] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1694.470590] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] raise exceptions.translate_fault(task_info.error)
[ 1694.470590] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1694.470590] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Faults: ['InvalidArgument']
[ 1694.470590] env[61215]: ERROR nova.compute.manager [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3]
[ 1694.470710] env[61215]: DEBUG nova.compute.utils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1694.472095] env[61215]: DEBUG nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Build of instance 97dae204-f706-41b5-bf9f-b320d022b2f3 was re-scheduled: A specified parameter was not correct: fileType
[ 1694.472095] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 1694.472476] env[61215]: DEBUG nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 1694.472654] env[61215]: DEBUG nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged.
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1694.472828] env[61215]: DEBUG nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1694.472993] env[61215]: DEBUG nova.network.neutron [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1696.025053] env[61215]: DEBUG nova.network.neutron [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.044844] env[61215]: INFO nova.compute.manager [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Took 1.57 seconds to deallocate network for instance. [ 1696.179205] env[61215]: INFO nova.scheduler.client.report [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Deleted allocations for instance 97dae204-f706-41b5-bf9f-b320d022b2f3 [ 1696.213097] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58c95c5e-4884-4795-b4e8-439653ac0ed9 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Lock "97dae204-f706-41b5-bf9f-b320d022b2f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 427.237s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.214562] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9453684d-6010-41d2-91ac-923c13fc9127 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Lock "97dae204-f706-41b5-bf9f-b320d022b2f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 227.825s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.214562] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9453684d-6010-41d2-91ac-923c13fc9127 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Acquiring lock "97dae204-f706-41b5-bf9f-b320d022b2f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.215259] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9453684d-6010-41d2-91ac-923c13fc9127 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Lock "97dae204-f706-41b5-bf9f-b320d022b2f3-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.215259] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9453684d-6010-41d2-91ac-923c13fc9127 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Lock "97dae204-f706-41b5-bf9f-b320d022b2f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.222036] env[61215]: INFO nova.compute.manager [None req-9453684d-6010-41d2-91ac-923c13fc9127 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Terminating instance [ 1696.224295] env[61215]: DEBUG nova.compute.manager [None req-9453684d-6010-41d2-91ac-923c13fc9127 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1696.224502] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9453684d-6010-41d2-91ac-923c13fc9127 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1696.224767] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc6156ca-4a56-4a33-99ba-29fc370d9f60 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.228393] env[61215]: DEBUG nova.compute.manager [None req-f9431c3a-9db2-459b-94fe-c7872ab25c05 tempest-SecurityGroupsTestJSON-1182727218 tempest-SecurityGroupsTestJSON-1182727218-project-member] [instance: 486e9745-b512-48ad-852a-166a7d63cf5e] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1696.239636] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7414751f-0ffc-449f-80af-659c8e7126c7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.264751] env[61215]: DEBUG nova.compute.manager [None req-f9431c3a-9db2-459b-94fe-c7872ab25c05 tempest-SecurityGroupsTestJSON-1182727218 tempest-SecurityGroupsTestJSON-1182727218-project-member] [instance: 486e9745-b512-48ad-852a-166a7d63cf5e] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1696.279243] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-9453684d-6010-41d2-91ac-923c13fc9127 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 97dae204-f706-41b5-bf9f-b320d022b2f3 could not be found. 
[ 1696.279243] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9453684d-6010-41d2-91ac-923c13fc9127 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1696.279243] env[61215]: INFO nova.compute.manager [None req-9453684d-6010-41d2-91ac-923c13fc9127 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1696.279243] env[61215]: DEBUG oslo.service.loopingcall [None req-9453684d-6010-41d2-91ac-923c13fc9127 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1696.281736] env[61215]: DEBUG nova.compute.manager [-] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1696.281736] env[61215]: DEBUG nova.network.neutron [-] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1696.293167] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f9431c3a-9db2-459b-94fe-c7872ab25c05 tempest-SecurityGroupsTestJSON-1182727218 tempest-SecurityGroupsTestJSON-1182727218-project-member] Lock "486e9745-b512-48ad-852a-166a7d63cf5e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.525s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.307392] env[61215]: DEBUG nova.compute.manager [None req-d752b425-c63c-4530-b33e-5d201854e03b tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: 5791e2e4-8ec9-4a4d-9ed9-afa829cdd6da] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1696.311963] env[61215]: DEBUG nova.network.neutron [-] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1696.322171] env[61215]: INFO nova.compute.manager [-] [instance: 97dae204-f706-41b5-bf9f-b320d022b2f3] Took 0.04 seconds to deallocate network for instance. [ 1696.342911] env[61215]: DEBUG nova.compute.manager [None req-d752b425-c63c-4530-b33e-5d201854e03b tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: 5791e2e4-8ec9-4a4d-9ed9-afa829cdd6da] Instance disappeared before build. 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1696.370124] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d752b425-c63c-4530-b33e-5d201854e03b tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "5791e2e4-8ec9-4a4d-9ed9-afa829cdd6da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.626s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.383254] env[61215]: DEBUG nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1696.435348] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9453684d-6010-41d2-91ac-923c13fc9127 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Lock "97dae204-f706-41b5-bf9f-b320d022b2f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.221s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.455900] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.456078] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.457627] env[61215]: INFO nova.compute.claims [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1696.948755] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963d90fa-850e-4cb7-a1f3-2500d842a9df {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.958845] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eacd01c7-d64c-42bc-8cef-f2acdf55a055 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.996778] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a9ebbda-ebf8-4f85-9aad-94422bdf81d0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.005298] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002bc165-1183-4b1f-94b0-bdbac99eebc1 {{(pid=61215) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.019203] env[61215]: DEBUG nova.compute.provider_tree [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1697.029714] env[61215]: DEBUG nova.scheduler.client.report [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1697.046822] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.591s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.047324] env[61215]: DEBUG nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1697.094395] env[61215]: DEBUG nova.compute.utils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1697.094395] env[61215]: DEBUG nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Not allocating networking since 'none' was specified. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1697.103776] env[61215]: DEBUG nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1697.178039] env[61215]: DEBUG nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1697.205210] env[61215]: DEBUG nova.virt.hardware [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1697.205580] env[61215]: DEBUG nova.virt.hardware [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1697.205686] env[61215]: DEBUG nova.virt.hardware [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1697.206381] env[61215]: DEBUG nova.virt.hardware [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1697.206381] env[61215]: DEBUG nova.virt.hardware [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1697.206381] env[61215]: DEBUG nova.virt.hardware [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1697.206381] env[61215]: DEBUG nova.virt.hardware [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1697.207048] env[61215]: DEBUG nova.virt.hardware [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1697.207048] env[61215]: DEBUG nova.virt.hardware [None 
req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1697.207350] env[61215]: DEBUG nova.virt.hardware [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1697.207625] env[61215]: DEBUG nova.virt.hardware [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1697.208880] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42eaf25a-b214-4681-beed-48a4a24de129 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.217663] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e727a773-dd05-4e6a-96f8-80ce3e423666 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.231836] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Instance VIF info [] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1697.237541] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Creating folder: Project (bb26c6fedc78496299757127a785617a). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1697.237820] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0cb8c448-e37f-413e-b91c-ae7796024255 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.249109] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Created folder: Project (bb26c6fedc78496299757127a785617a) in parent group-v352463. [ 1697.250090] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Creating folder: Instances. Parent ref: group-v352524. 
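The topology negotiation a few entries back (limits 65536:65536:65536, preferred 0:0:0, exactly one possible topology 1:1:1 for a single vCPU) boils down to enumerating factorizations of the vCPU count within per-dimension limits. A simplified sketch of that search, not nova's actual `_get_possible_cpu_topologies` (the real code also honors preferred orderings):

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals
    the vCPU count and that fit the per-dimension limits -- a simplified
    version of the search logged above."""
    for s in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % s:
            continue
        for c in range(1, min(vcpus // s, max_cores) + 1):
            if (vcpus // s) % c:
                continue
            t = vcpus // (s * c)
            if t <= max_threads:
                yield (s, c, t)

print(list(possible_topologies(1)))  # [(1, 1, 1)], matching the log
```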
{{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1697.250090] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c088f1ea-aefa-4505-8748-85a82377c404 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.260305] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Created folder: Instances in parent group-v352524. [ 1697.260305] env[61215]: DEBUG oslo.service.loopingcall [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1697.260493] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1697.261398] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-77e14197-c27a-4080-966c-a6c19f664912 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.278762] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1697.278762] env[61215]: value = "task-1690357" [ 1697.278762] env[61215]: _type = "Task" [ 1697.278762] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.289656] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690357, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.794758] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690357, 'name': CreateVM_Task, 'duration_secs': 0.334581} completed successfully. 
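CreateVM_Task above goes from "progress is 0%" to "completed successfully" with a recorded duration_secs; those lines come from a poll loop. A generic sketch of such a loop (the callable and dict shape are assumptions for this sketch, not oslo.vmware's internal API):

```python
import time

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a task until it succeeds or errors, like the CreateVM_Task
    wait above. get_task_info is any callable returning a dict with a
    'state' key -- an assumed shape for this sketch."""
    while True:
        info = get_task_info()
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        time.sleep(poll_interval)  # source of the "progress is 0%." lines
```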
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.795195] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1697.796033] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1697.796410] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1697.797386] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1697.797788] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a089a18-ebab-48ff-8f94-4f810dcfa2c2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.804643] env[61215]: DEBUG oslo_vmware.api [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Waiting for the task: (returnval){ [ 1697.804643] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52195e73-ec78-9718-9b90-0030fb93c9d6" [ 1697.804643] env[61215]: _type = "Task" [ 1697.804643] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.814852] env[61215]: DEBUG oslo_vmware.api [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52195e73-ec78-9718-9b90-0030fb93c9d6, 'name': SearchDatastore_Task} progress is 0%. 
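The Acquiring/Acquired/Releasing choreography on the `devstack-image-cache_base/<image-id>` lock above is the standard oslo.concurrency pattern, the same one serializing the per-instance `_locked_do_build_and_run_instance` entries: the datastore path of the cached image doubles as the lock name, so only one worker fetches or inspects a given image at a time. A minimal sketch with the lock name taken from the log:

```python
from oslo_concurrency import lockutils

# The datastore path of the cached image serves as the lock name, as in
# the entries above; lockutils.lock() is a context manager that emits
# exactly this acquire/release pair.
CACHE_LOCK = ("[datastore1] devstack-image-cache_base/"
              "e91f0c25-9ff9-4937-8440-f47cfb2028bc")

with lockutils.lock(CACHE_LOCK):
    pass  # search the datastore / process the cached image here
```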
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.319291] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.319772] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1698.320012] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1698.344196] env[61215]: DEBUG oslo_concurrency.lockutils [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Acquiring lock "e9369a71-fc94-4cdd-82c6-6308783581c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.344675] env[61215]: DEBUG oslo_concurrency.lockutils [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "e9369a71-fc94-4cdd-82c6-6308783581c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.232773] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquiring lock "38fe96cf-e570-4c0e-af6f-2f199efc06ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.233248] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Lock "38fe96cf-e570-4c0e-af6f-2f199efc06ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.625637] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquiring lock "2e186217-c1e1-40c6-8d84-988f35f6b93d" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.775461] env[61215]: DEBUG oslo_concurrency.lockutils [None req-21e8726b-da04-4770-9ccd-271b724f3f36 tempest-ServerRescueTestJSONUnderV235-11277943 tempest-ServerRescueTestJSONUnderV235-11277943-project-member] Acquiring lock "f91efd4b-851e-44bc-9cf2-7be8a2d2d7df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.775461] env[61215]: DEBUG oslo_concurrency.lockutils [None req-21e8726b-da04-4770-9ccd-271b724f3f36 tempest-ServerRescueTestJSONUnderV235-11277943 tempest-ServerRescueTestJSONUnderV235-11277943-project-member] Lock "f91efd4b-851e-44bc-9cf2-7be8a2d2d7df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.358813] env[61215]: DEBUG oslo_concurrency.lockutils [None req-11da0ca4-84ac-45fc-8239-2771138848fa tempest-ServerPasswordTestJSON-753560478 tempest-ServerPasswordTestJSON-753560478-project-member] Acquiring lock "0576d0b5-3890-4e1d-b208-40d46c2fdae7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.358813] env[61215]: DEBUG oslo_concurrency.lockutils [None req-11da0ca4-84ac-45fc-8239-2771138848fa tempest-ServerPasswordTestJSON-753560478 tempest-ServerPasswordTestJSON-753560478-project-member] Lock "0576d0b5-3890-4e1d-b208-40d46c2fdae7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.701852] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d898448a-6b40-4644-acae-d0e0930ae79a tempest-ServerShowV257Test-1987766564 tempest-ServerShowV257Test-1987766564-project-member] Acquiring lock "bf807d62-c8be-4819-9fc1-4b2d6d14cc39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.702163] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d898448a-6b40-4644-acae-d0e0930ae79a tempest-ServerShowV257Test-1987766564 tempest-ServerShowV257Test-1987766564-project-member] Lock "bf807d62-c8be-4819-9fc1-4b2d6d14cc39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.379229] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8d522f22-dbde-4362-b0a2-aeb7cde90dd3 tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] Acquiring lock "12825ddf-86ee-4500-b43b-cf480dc54f3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.379663] env[61215]: DEBUG 
oslo_concurrency.lockutils [None req-8d522f22-dbde-4362-b0a2-aeb7cde90dd3 tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] Lock "12825ddf-86ee-4500-b43b-cf480dc54f3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.508578] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e4c48f72-89b3-444c-b557-7d91448a2997 tempest-ServerShowV254Test-703576172 tempest-ServerShowV254Test-703576172-project-member] Acquiring lock "a42577f4-29ba-446b-a561-745ff14d1696" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.508925] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e4c48f72-89b3-444c-b557-7d91448a2997 tempest-ServerShowV254Test-703576172 tempest-ServerShowV254Test-703576172-project-member] Lock "a42577f4-29ba-446b-a561-745ff14d1696" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.620314] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1733.620314] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1734.654197] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1734.654538] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1734.654592] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1734.654726] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
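The periodic-task entries above come from oslo.service: the manager subclasses PeriodicTasks and decorates methods, and a task can opt out cheaply, which is what the `CONF.reclaim_instance_interval <= 0, skipping...` line shows. A runnable sketch (the spacing value and the option registration are illustrative; nova defines the real option itself):

```python
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF
# Illustrative option registration for the sketch only.
CONF.register_opts([cfg.IntOpt('reclaim_instance_interval', default=0)])

class Manager(periodic_task.PeriodicTasks):
    @periodic_task.periodic_task(spacing=60)  # spacing chosen for the sketch
    def _reclaim_queued_deletes(self, context):
        if CONF.reclaim_instance_interval <= 0:
            return  # the "skipping..." entry in the log

mgr = Manager(CONF)
mgr.run_periodic_tasks(context=None)
```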
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1736.655949] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1736.655949] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1736.655949] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1736.678852] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1736.678852] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1736.679036] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1736.681029] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1736.681133] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1736.681260] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1736.681455] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1736.681530] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Skipping network cache update for instance because it is Building. 
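The cache-heal pass above rebuilds its worklist and drops every instance still in the Building state; only stable instances get a network-info refresh. The filter is a one-liner in spirit (the names below are mine, not nova's):

```python
BUILDING = 'building'  # value of nova's vm_states.BUILDING

def instances_to_heal(instances):
    """Yield instances eligible for a network-info-cache refresh,
    skipping builds in flight as the log entries above do."""
    for inst in instances:
        if inst['vm_state'] == BUILDING:
            continue  # "Skipping network cache update ... Building."
        yield inst
```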
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1736.681659] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1736.681780] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1736.681902] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1736.682461] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1736.804893] env[61215]: DEBUG oslo_concurrency.lockutils [None req-7a7245fd-f0e5-4f35-b608-3d0e4bb63c3c tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "e4cf3e92-a1a6-47ac-8625-37cdbf96cb35" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.805154] env[61215]: DEBUG oslo_concurrency.lockutils [None req-7a7245fd-f0e5-4f35-b608-3d0e4bb63c3c tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "e4cf3e92-a1a6-47ac-8625-37cdbf96cb35" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.653794] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1739.372982] env[61215]: WARNING oslo_vmware.rw_handles [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1739.372982] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1739.372982] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1739.372982] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1739.372982] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1739.372982] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 1739.372982] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1739.372982] env[61215]: ERROR oslo_vmware.rw_handles version, status, 
reason = self._read_status() [ 1739.372982] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1739.372982] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1739.372982] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1739.372982] env[61215]: ERROR oslo_vmware.rw_handles [ 1739.373683] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/00c7e34a-aca4-4eb2-b284-c6d646988f0b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1739.375817] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1739.376101] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Copying Virtual Disk [datastore1] vmware_temp/00c7e34a-aca4-4eb2-b284-c6d646988f0b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/00c7e34a-aca4-4eb2-b284-c6d646988f0b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1739.376389] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28eaa749-5a4f-4ced-b7d6-618f0ba04152 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.385285] env[61215]: DEBUG oslo_vmware.api [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Waiting for the task: (returnval){ [ 1739.385285] env[61215]: value = "task-1690358" [ 1739.385285] env[61215]: _type = "Task" [ 1739.385285] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.393339] env[61215]: DEBUG oslo_vmware.api [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Task: {'id': task-1690358, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.895973] env[61215]: DEBUG oslo_vmware.exceptions [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Fault InvalidArgument not matched. 
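The WARNING traceback above is a benign close-time failure: the datastore side drops the upload connection before answering, so `getresponse()` raises RemoteDisconnected even though the payload was already written; oslo.vmware logs it and carries on. The shape of that handling, sketched:

```python
import http.client

def close_upload(conn):
    """Mirror of the close() failure in the traceback above: tolerate
    the server hanging up before sending a response."""
    try:
        conn.getresponse()
    except http.client.RemoteDisconnected as exc:
        # "Error occurred while reading the HTTP response." in the log
        print(f"warning: {exc}")
```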
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1739.896397] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.896945] env[61215]: ERROR nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1739.896945] env[61215]: Faults: ['InvalidArgument'] [ 1739.896945] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] Traceback (most recent call last): [ 1739.896945] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1739.896945] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] yield resources [ 1739.896945] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1739.896945] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] self.driver.spawn(context, instance, image_meta, [ 1739.896945] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1739.896945] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1739.896945] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1739.896945] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] self._fetch_image_if_missing(context, vi) [ 1739.896945] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1739.897396] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] image_cache(vi, tmp_image_ds_loc) [ 1739.897396] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1739.897396] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] vm_util.copy_virtual_disk( [ 1739.897396] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1739.897396] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] session._wait_for_task(vmdk_copy_task) [ 1739.897396] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1739.897396] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] return self.wait_for_task(task_ref) [ 1739.897396] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1739.897396] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] return evt.wait() [ 1739.897396] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1739.897396] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] result = hub.switch() [ 1739.897396] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1739.897396] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] return self.greenlet.switch() [ 1739.897766] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1739.897766] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] self.f(*self.args, **self.kw) [ 1739.897766] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1739.897766] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] raise exceptions.translate_fault(task_info.error) [ 1739.897766] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1739.897766] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] Faults: ['InvalidArgument'] [ 1739.897766] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] [ 1739.897766] env[61215]: INFO nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Terminating instance [ 1739.899481] env[61215]: DEBUG nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1739.899749] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1739.900045] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.900247] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1739.900995] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b6d2d9-17e5-439e-947f-258ee0fa98d6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.903748] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce1d1866-f649-4892-8d2c-02a9163974c9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.909342] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1739.909565] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ec2b872-648f-44f5-b500-89dbe0832b4d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.911804] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1739.911987] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Folder [datastore1] devstack-image-cache_base created. 
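The Creating directory / Created directory / Folder ... created sequence above is an idempotent make-directory: issue the create and treat "already exists" as success. A local-filesystem analogy of the same pattern (the log's version goes through vCenter's FileManager.MakeDirectory, not os.makedirs):

```python
import errno
import os

def ensure_dir(path):
    """Create a directory, tolerating that it already exists -- the
    same create-or-ignore pattern the ds_util entries above show."""
    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise
```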
{{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1739.912921] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0511f31c-9240-46ac-8773-a383091ec5e7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.917514] env[61215]: DEBUG oslo_vmware.api [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Waiting for the task: (returnval){ [ 1739.917514] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]525d3254-0097-b951-1af9-628574006d54" [ 1739.917514] env[61215]: _type = "Task" [ 1739.917514] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.924801] env[61215]: DEBUG oslo_vmware.api [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525d3254-0097-b951-1af9-628574006d54, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.990810] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1739.990947] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1739.991089] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Deleting the datastore file [datastore1] 82698789-4c08-453b-a973-1916d1f94af6 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1739.991360] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9ca2991-6b97-48d8-8375-c34e0ca6825e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.997585] env[61215]: DEBUG oslo_vmware.api [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Waiting for the task: (returnval){ [ 1739.997585] env[61215]: value = "task-1690360" [ 1739.997585] env[61215]: _type = "Task" [ 1739.997585] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.005040] env[61215]: DEBUG oslo_vmware.api [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Task: {'id': task-1690360, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.428040] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1740.428314] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Creating directory with path [datastore1] vmware_temp/604fc32e-af8c-4397-b0d9-288d4b4e752d/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1740.428538] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d898b63-2c33-4c20-b1fe-8cf3a4801602 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.440840] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Created directory with path [datastore1] vmware_temp/604fc32e-af8c-4397-b0d9-288d4b4e752d/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1740.441043] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Fetch image to [datastore1] vmware_temp/604fc32e-af8c-4397-b0d9-288d4b4e752d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1740.441225] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/604fc32e-af8c-4397-b0d9-288d4b4e752d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1740.442031] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87f6711-4cd9-444a-b6c2-924066a90ff0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.448626] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5e9826-f083-46ab-bc54-5bf112983bb5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.457397] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630c7e15-a2a4-4ef8-8471-9dec9e0cb9c4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.488553] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c0a6f36b-95de-4d20-9db4-bf534f5b7cd6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.493817] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-93004ca4-198e-4680-81fa-8e982b0df100 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.505771] env[61215]: DEBUG oslo_vmware.api [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Task: {'id': task-1690360, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072029} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.506009] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1740.506204] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1740.506396] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1740.506542] env[61215]: INFO nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Took 0.61 seconds to destroy the instance on the hypervisor. 
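The teardown above runs in a fixed order: UnregisterVM first, then DeleteDatastoreFile_Task on the instance directory, so the datastore delete can never race a still-registered VM. A sketch of that ordering with hypothetical `vm` and `datastore` objects (neither is a real nova or oslo.vmware API):

```python
def destroy_instance(vm, datastore):
    """Ordered teardown mirroring the log: unregister, then delete the
    instance's datastore directory. `vm` and `datastore` are
    hypothetical stand-ins for this sketch."""
    vm.unregister()                        # UnregisterVM
    task = datastore.delete(vm.directory)  # DeleteDatastoreFile_Task
    task.wait()                            # ~0.07s in the log above
```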
[ 1740.508669] env[61215]: DEBUG nova.compute.claims [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1740.508841] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.509065] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.580983] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1740.632826] env[61215]: DEBUG oslo_vmware.rw_handles [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/604fc32e-af8c-4397-b0d9-288d4b4e752d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1740.690995] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1740.697142] env[61215]: DEBUG oslo_vmware.rw_handles [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1740.697378] env[61215]: DEBUG oslo_vmware.rw_handles [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/604fc32e-af8c-4397-b0d9-288d4b4e752d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
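The rw_handles lines above describe the fetch path for the image cache: open an HTTP write handle against the datastore URL, stream the Glance image iterator into it chunk by chunk, then close. The transfer loop is essentially this sketch (names are assumptions, not the oslo.vmware API):

```python
def transfer_image(image_iter, write_handle):
    """Stream image chunks into a datastore write handle, as the
    'Creating HTTP connection ... Completed reading ... Closing write
    handle' sequence above records."""
    for chunk in image_iter:
        write_handle.write(chunk)
    write_handle.close()  # where RemoteDisconnected surfaced earlier
```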
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1740.702714] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.899386] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8db82c-fc79-463f-8f01-be4880366582 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.907049] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8faca449-d3ab-42d2-95c1-40cc6eb1c496 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.936051] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da20d4bf-0fcd-428b-b7c9-87ee9c5f0442 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.942665] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8747b8-e67a-4cf7-a44a-1129b936fc8c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.956400] env[61215]: DEBUG nova.compute.provider_tree [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1740.964523] env[61215]: DEBUG nova.scheduler.client.report [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1740.977695] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.469s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.978219] env[61215]: ERROR nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1740.978219] 
env[61215]: Faults: ['InvalidArgument'] [ 1740.978219] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] Traceback (most recent call last): [ 1740.978219] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1740.978219] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] self.driver.spawn(context, instance, image_meta, [ 1740.978219] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1740.978219] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1740.978219] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1740.978219] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] self._fetch_image_if_missing(context, vi) [ 1740.978219] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1740.978219] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] image_cache(vi, tmp_image_ds_loc) [ 1740.978219] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1740.978607] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] vm_util.copy_virtual_disk( [ 1740.978607] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1740.978607] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] session._wait_for_task(vmdk_copy_task) [ 1740.978607] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1740.978607] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] return self.wait_for_task(task_ref) [ 1740.978607] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1740.978607] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] return evt.wait() [ 1740.978607] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1740.978607] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] result = hub.switch() [ 1740.978607] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1740.978607] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] return self.greenlet.switch() [ 1740.978607] env[61215]: ERROR nova.compute.manager [instance: 
82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1740.978607] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] self.f(*self.args, **self.kw) [ 1740.979098] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1740.979098] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] raise exceptions.translate_fault(task_info.error) [ 1740.979098] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1740.979098] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] Faults: ['InvalidArgument'] [ 1740.979098] env[61215]: ERROR nova.compute.manager [instance: 82698789-4c08-453b-a973-1916d1f94af6] [ 1740.979098] env[61215]: DEBUG nova.compute.utils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1740.979829] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.277s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.980015] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.980432] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1740.982384] env[61215]: DEBUG nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Build of instance 82698789-4c08-453b-a973-1916d1f94af6 was re-scheduled: A specified parameter was not correct: fileType [ 1740.982384] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1740.982760] env[61215]: DEBUG nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1740.982935] env[61215]: DEBUG nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 
tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1740.983120] env[61215]: DEBUG nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1740.983287] env[61215]: DEBUG nova.network.neutron [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1740.985311] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36b1579-dd3f-4d8c-9f34-64ad0f638b09 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.993405] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba56cb96-8930-4494-85b2-6cae753335d6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.006665] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505dc925-62de-4206-9869-478dc8572a67 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.012648] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50527a48-533a-4efd-b039-fc04d5e827cd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.040676] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181292MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1741.040859] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.041084] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.116632] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 82698789-4c08-453b-a973-1916d1f94af6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.116801] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0223d7b6-12e1-4418-97f2-012ed41daa7a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.116932] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.117072] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.117196] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d49f702b-cd29-4491-938c-0291b351ef20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.117316] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.117433] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.117549] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8d4665c7-67de-4ab3-a8b7-596a5e1152ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.117665] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.117780] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 2e186217-c1e1-40c6-8d84-988f35f6b93d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1741.130203] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.141556] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c233ab81-232d-49be-a176-bf846f0d8cc3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.154250] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bb56c470-9f85-44b1-b1ec-f44236e9de51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.166803] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d0a5229f-8da2-40bb-af99-28f32923892f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.180009] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ddaa12c8-88c6-4ba0-beec-cad92acd9768 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.190423] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a17827ee-8ad2-459b-ba7d-f9f9be429e64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.204098] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 96a536ea-a1c7-470e-8873-bc1e723efefa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.215957] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 40fe7cd9-7c99-4add-a2eb-429ff2aba7a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.227521] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e9369a71-fc94-4cdd-82c6-6308783581c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.238109] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 38fe96cf-e570-4c0e-af6f-2f199efc06ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.248249] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f91efd4b-851e-44bc-9cf2-7be8a2d2d7df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.259165] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0576d0b5-3890-4e1d-b208-40d46c2fdae7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.270873] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bf807d62-c8be-4819-9fc1-4b2d6d14cc39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.282969] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 12825ddf-86ee-4500-b43b-cf480dc54f3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.294290] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a42577f4-29ba-446b-a561-745ff14d1696 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.307667] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e4cf3e92-a1a6-47ac-8625-37cdbf96cb35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1741.308061] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1741.308177] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1741.401206] env[61215]: DEBUG nova.network.neutron [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1741.421757] env[61215]: INFO nova.compute.manager [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Took 0.44 seconds to deallocate network for instance. 
[ 1741.525420] env[61215]: INFO nova.scheduler.client.report [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Deleted allocations for instance 82698789-4c08-453b-a973-1916d1f94af6 [ 1741.562957] env[61215]: DEBUG oslo_concurrency.lockutils [None req-08a722b5-3092-402c-b925-bf33f194bc4c tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Lock "82698789-4c08-453b-a973-1916d1f94af6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 465.533s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.564179] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ae2513eb-6a21-476a-b92f-072d8e2ed7c0 tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Lock "82698789-4c08-453b-a973-1916d1f94af6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 267.038s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.564418] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ae2513eb-6a21-476a-b92f-072d8e2ed7c0 tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Acquiring lock "82698789-4c08-453b-a973-1916d1f94af6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.564637] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ae2513eb-6a21-476a-b92f-072d8e2ed7c0 tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Lock "82698789-4c08-453b-a973-1916d1f94af6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.564849] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ae2513eb-6a21-476a-b92f-072d8e2ed7c0 tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Lock "82698789-4c08-453b-a973-1916d1f94af6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.567409] env[61215]: INFO nova.compute.manager [None req-ae2513eb-6a21-476a-b92f-072d8e2ed7c0 tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Terminating instance [ 1741.571707] env[61215]: DEBUG nova.compute.manager [None req-ae2513eb-6a21-476a-b92f-072d8e2ed7c0 tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1741.572328] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2513eb-6a21-476a-b92f-072d8e2ed7c0 tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1741.572378] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7c1396e-8fd3-4d46-91bf-bd648e344d33 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.577829] env[61215]: DEBUG nova.compute.manager [None req-16cfbbd1-b5a4-4ff6-b608-afa1fa993c4c tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: ad606ad3-d291-4a71-91d5-850a9795f301] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1741.584734] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e30bbff-13b5-49a3-9281-265b1e31c1d9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.616751] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-ae2513eb-6a21-476a-b92f-072d8e2ed7c0 tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 82698789-4c08-453b-a973-1916d1f94af6 could not be found. [ 1741.616927] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ae2513eb-6a21-476a-b92f-072d8e2ed7c0 tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1741.617126] env[61215]: INFO nova.compute.manager [None req-ae2513eb-6a21-476a-b92f-072d8e2ed7c0 tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1741.617372] env[61215]: DEBUG oslo.service.loopingcall [None req-ae2513eb-6a21-476a-b92f-072d8e2ed7c0 tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1741.620053] env[61215]: DEBUG nova.compute.manager [None req-16cfbbd1-b5a4-4ff6-b608-afa1fa993c4c tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] [instance: ad606ad3-d291-4a71-91d5-850a9795f301] Instance disappeared before build. 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1741.621059] env[61215]: DEBUG nova.compute.manager [-] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1741.621059] env[61215]: DEBUG nova.network.neutron [-] [instance: 82698789-4c08-453b-a973-1916d1f94af6] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1741.642076] env[61215]: DEBUG oslo_concurrency.lockutils [None req-16cfbbd1-b5a4-4ff6-b608-afa1fa993c4c tempest-MigrationsAdminTest-915425646 tempest-MigrationsAdminTest-915425646-project-member] Lock "ad606ad3-d291-4a71-91d5-850a9795f301" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 234.804s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.647893] env[61215]: DEBUG nova.network.neutron [-] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1741.650881] env[61215]: DEBUG nova.compute.manager [None req-959678f1-500b-45b6-8b58-c7c2e421a8f7 tempest-ServerRescueNegativeTestJSON-558999008 tempest-ServerRescueNegativeTestJSON-558999008-project-member] [instance: d8f5d964-fbcd-45fc-acb7-a0ca9e01b615] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1741.655890] env[61215]: INFO nova.compute.manager [-] [instance: 82698789-4c08-453b-a973-1916d1f94af6] Took 0.03 seconds to deallocate network for instance. [ 1741.675510] env[61215]: DEBUG nova.compute.manager [None req-959678f1-500b-45b6-8b58-c7c2e421a8f7 tempest-ServerRescueNegativeTestJSON-558999008 tempest-ServerRescueNegativeTestJSON-558999008-project-member] [instance: d8f5d964-fbcd-45fc-acb7-a0ca9e01b615] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1741.696711] env[61215]: DEBUG oslo_concurrency.lockutils [None req-959678f1-500b-45b6-8b58-c7c2e421a8f7 tempest-ServerRescueNegativeTestJSON-558999008 tempest-ServerRescueNegativeTestJSON-558999008-project-member] Lock "d8f5d964-fbcd-45fc-acb7-a0ca9e01b615" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.770s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.709122] env[61215]: DEBUG nova.compute.manager [None req-6a9817d3-fe79-4dc8-9fe4-947c54d30f8c tempest-ServerRescueNegativeTestJSON-558999008 tempest-ServerRescueNegativeTestJSON-558999008-project-member] [instance: 4cdbd5ad-fbbb-4cee-811c-60cf47094cad] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1741.714782] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004dc09c-df73-4339-993f-e4bedd305902 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.723289] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74fcaf5-8141-42b4-a405-5962068ea119 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.756417] env[61215]: DEBUG nova.compute.manager [None req-6a9817d3-fe79-4dc8-9fe4-947c54d30f8c tempest-ServerRescueNegativeTestJSON-558999008 tempest-ServerRescueNegativeTestJSON-558999008-project-member] [instance: 4cdbd5ad-fbbb-4cee-811c-60cf47094cad] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1741.760168] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482722a0-2dd5-418f-b365-6b6eadb693ad {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.762911] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ae2513eb-6a21-476a-b92f-072d8e2ed7c0 tempest-InstanceActionsNegativeTestJSON-64149177 tempest-InstanceActionsNegativeTestJSON-64149177-project-member] Lock "82698789-4c08-453b-a973-1916d1f94af6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.199s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.772566] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c651e12-8c5b-4118-b84b-056f080baf6c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.787341] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1741.789470] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6a9817d3-fe79-4dc8-9fe4-947c54d30f8c tempest-ServerRescueNegativeTestJSON-558999008 tempest-ServerRescueNegativeTestJSON-558999008-project-member] Lock "4cdbd5ad-fbbb-4cee-811c-60cf47094cad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.886s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.796038] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1741.800299] env[61215]: DEBUG nova.compute.manager 
[None req-b9ce1d83-442e-4660-bbcb-07ddff5976ce tempest-ListImageFiltersTestJSON-577297259 tempest-ListImageFiltersTestJSON-577297259-project-member] [instance: dfbd190d-8565-4272-8320-eef68d00b9a1] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1741.809253] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1741.809253] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.768s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.826143] env[61215]: DEBUG nova.compute.manager [None req-b9ce1d83-442e-4660-bbcb-07ddff5976ce tempest-ListImageFiltersTestJSON-577297259 tempest-ListImageFiltersTestJSON-577297259-project-member] [instance: dfbd190d-8565-4272-8320-eef68d00b9a1] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1741.848548] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b9ce1d83-442e-4660-bbcb-07ddff5976ce tempest-ListImageFiltersTestJSON-577297259 tempest-ListImageFiltersTestJSON-577297259-project-member] Lock "dfbd190d-8565-4272-8320-eef68d00b9a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.509s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.858176] env[61215]: DEBUG nova.compute.manager [None req-0d63e8a7-bad5-45a9-8834-9e3985e0b3cd tempest-ListImageFiltersTestJSON-577297259 tempest-ListImageFiltersTestJSON-577297259-project-member] [instance: 26c74fa5-69fd-4a83-9ef7-8ed4103b5460] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1741.887174] env[61215]: DEBUG nova.compute.manager [None req-0d63e8a7-bad5-45a9-8834-9e3985e0b3cd tempest-ListImageFiltersTestJSON-577297259 tempest-ListImageFiltersTestJSON-577297259-project-member] [instance: 26c74fa5-69fd-4a83-9ef7-8ed4103b5460] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1741.907261] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0d63e8a7-bad5-45a9-8834-9e3985e0b3cd tempest-ListImageFiltersTestJSON-577297259 tempest-ListImageFiltersTestJSON-577297259-project-member] Lock "26c74fa5-69fd-4a83-9ef7-8ed4103b5460" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.551s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.916172] env[61215]: DEBUG nova.compute.manager [None req-d031ed1d-bd02-4d8d-b74a-f770e7dcbf8e tempest-ServerGroupTestJSON-1949014661 tempest-ServerGroupTestJSON-1949014661-project-member] [instance: 008007b0-7ff0-4711-80dc-707efea20e75] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1741.939642] env[61215]: DEBUG nova.compute.manager [None req-d031ed1d-bd02-4d8d-b74a-f770e7dcbf8e tempest-ServerGroupTestJSON-1949014661 tempest-ServerGroupTestJSON-1949014661-project-member] [instance: 008007b0-7ff0-4711-80dc-707efea20e75] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1741.960882] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d031ed1d-bd02-4d8d-b74a-f770e7dcbf8e tempest-ServerGroupTestJSON-1949014661 tempest-ServerGroupTestJSON-1949014661-project-member] Lock "008007b0-7ff0-4711-80dc-707efea20e75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.618s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.972015] env[61215]: DEBUG nova.compute.manager [None req-95b00ab7-71fb-4c3c-b578-d1cd5d96d79f tempest-ServerAddressesNegativeTestJSON-2117665619 tempest-ServerAddressesNegativeTestJSON-2117665619-project-member] [instance: a0b2bf99-e82a-4866-844b-0e5ed758e78c] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1741.993476] env[61215]: DEBUG nova.compute.manager [None req-95b00ab7-71fb-4c3c-b578-d1cd5d96d79f tempest-ServerAddressesNegativeTestJSON-2117665619 tempest-ServerAddressesNegativeTestJSON-2117665619-project-member] [instance: a0b2bf99-e82a-4866-844b-0e5ed758e78c] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1742.015192] env[61215]: DEBUG oslo_concurrency.lockutils [None req-95b00ab7-71fb-4c3c-b578-d1cd5d96d79f tempest-ServerAddressesNegativeTestJSON-2117665619 tempest-ServerAddressesNegativeTestJSON-2117665619-project-member] Lock "a0b2bf99-e82a-4866-844b-0e5ed758e78c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.793s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.023546] env[61215]: DEBUG nova.compute.manager [None req-54e97152-2d35-40ea-ba8c-33890671b22d tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] [instance: 953e0804-8220-4fb0-a4af-8956be949a54] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1742.045911] env[61215]: DEBUG nova.compute.manager [None req-54e97152-2d35-40ea-ba8c-33890671b22d tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] [instance: 953e0804-8220-4fb0-a4af-8956be949a54] Instance disappeared before build. 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1742.064799] env[61215]: DEBUG oslo_concurrency.lockutils [None req-54e97152-2d35-40ea-ba8c-33890671b22d tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] Lock "953e0804-8220-4fb0-a4af-8956be949a54" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.651s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.072609] env[61215]: DEBUG nova.compute.manager [None req-c4f08818-3501-4caf-b419-d0f5e404a18d tempest-ServerActionsV293TestJSON-1175145830 tempest-ServerActionsV293TestJSON-1175145830-project-member] [instance: 3cf6ec8a-489b-4102-9fcb-581587345fc5] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1742.095639] env[61215]: DEBUG nova.compute.manager [None req-c4f08818-3501-4caf-b419-d0f5e404a18d tempest-ServerActionsV293TestJSON-1175145830 tempest-ServerActionsV293TestJSON-1175145830-project-member] [instance: 3cf6ec8a-489b-4102-9fcb-581587345fc5] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1742.117933] env[61215]: DEBUG oslo_concurrency.lockutils [None req-c4f08818-3501-4caf-b419-d0f5e404a18d tempest-ServerActionsV293TestJSON-1175145830 tempest-ServerActionsV293TestJSON-1175145830-project-member] Lock "3cf6ec8a-489b-4102-9fcb-581587345fc5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.050s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.127974] env[61215]: DEBUG nova.compute.manager [None req-0c7a8989-bfec-4fc2-b942-5dfa56facefd tempest-ServersNegativeTestJSON-1499202249 tempest-ServersNegativeTestJSON-1499202249-project-member] [instance: 772cf4c4-cdc2-4a00-8891-908b31827a7b] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1742.150991] env[61215]: DEBUG nova.compute.manager [None req-0c7a8989-bfec-4fc2-b942-5dfa56facefd tempest-ServersNegativeTestJSON-1499202249 tempest-ServersNegativeTestJSON-1499202249-project-member] [instance: 772cf4c4-cdc2-4a00-8891-908b31827a7b] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1742.172025] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0c7a8989-bfec-4fc2-b942-5dfa56facefd tempest-ServersNegativeTestJSON-1499202249 tempest-ServersNegativeTestJSON-1499202249-project-member] Lock "772cf4c4-cdc2-4a00-8891-908b31827a7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 214.008s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.180776] env[61215]: DEBUG nova.compute.manager [None req-fd7ba7c5-9f1d-43a3-bd18-ae56a3b70b3b tempest-ImagesOneServerNegativeTestJSON-1147663856 tempest-ImagesOneServerNegativeTestJSON-1147663856-project-member] [instance: eb4e172e-1eb1-4e31-a311-96f772f1a196] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1742.203868] env[61215]: DEBUG nova.compute.manager [None req-fd7ba7c5-9f1d-43a3-bd18-ae56a3b70b3b tempest-ImagesOneServerNegativeTestJSON-1147663856 tempest-ImagesOneServerNegativeTestJSON-1147663856-project-member] [instance: eb4e172e-1eb1-4e31-a311-96f772f1a196] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1742.223124] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd7ba7c5-9f1d-43a3-bd18-ae56a3b70b3b tempest-ImagesOneServerNegativeTestJSON-1147663856 tempest-ImagesOneServerNegativeTestJSON-1147663856-project-member] Lock "eb4e172e-1eb1-4e31-a311-96f772f1a196" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.729s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.233996] env[61215]: DEBUG nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1742.282042] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.282308] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.283843] env[61215]: INFO nova.compute.claims [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1742.621656] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601ae3ed-2d62-4bec-b1b6-eca80ce226ad {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.629361] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9547e1ad-c3ee-4bab-a758-68553393106b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.658585] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc72c1b-fc14-4f73-8160-4c029fc19b9b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.665504] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c39552e-33d5-4f67-b771-13303c1809b8 {{(pid=61215) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.678023] env[61215]: DEBUG nova.compute.provider_tree [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1742.687007] env[61215]: DEBUG nova.scheduler.client.report [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1742.699704] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.417s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.700861] env[61215]: DEBUG nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1742.737449] env[61215]: DEBUG nova.compute.utils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1742.738918] env[61215]: DEBUG nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1742.739123] env[61215]: DEBUG nova.network.neutron [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1742.747649] env[61215]: DEBUG nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Start building block device mappings for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1742.816633] env[61215]: DEBUG nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1742.820946] env[61215]: DEBUG nova.policy [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5f8cfcd284ce4cadac6be10c5a4cee92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f31b73e1640842478b71905fe6e586ae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1742.846050] env[61215]: DEBUG nova.virt.hardware [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1742.846334] env[61215]: DEBUG nova.virt.hardware [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1742.846451] env[61215]: DEBUG nova.virt.hardware [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1742.846641] env[61215]: DEBUG nova.virt.hardware [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1742.846795] env[61215]: DEBUG nova.virt.hardware [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1742.846946] env[61215]: DEBUG nova.virt.hardware [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1742.847167] env[61215]: DEBUG nova.virt.hardware [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1742.847331] env[61215]: DEBUG nova.virt.hardware [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1742.847500] env[61215]: DEBUG nova.virt.hardware [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1742.847669] env[61215]: DEBUG nova.virt.hardware [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1742.847876] env[61215]: DEBUG nova.virt.hardware [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1742.848972] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42eac5a6-caed-41d1-9a56-81db9ce79fae {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.857435] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquiring lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.861379] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72250c89-4ef9-44ee-af37-0cfbafbc039e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.183184] env[61215]: DEBUG nova.network.neutron [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Successfully created port: 
71fe011f-404c-4634-9211-4d53e88e4e40 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1743.882531] env[61215]: DEBUG nova.compute.manager [req-a3d54241-636c-4e21-8e22-bfcabb85bfc7 req-93179389-6189-4a2d-9be8-7e26d6be12d3 service nova] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Received event network-vif-plugged-71fe011f-404c-4634-9211-4d53e88e4e40 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1743.882841] env[61215]: DEBUG oslo_concurrency.lockutils [req-a3d54241-636c-4e21-8e22-bfcabb85bfc7 req-93179389-6189-4a2d-9be8-7e26d6be12d3 service nova] Acquiring lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.883075] env[61215]: DEBUG oslo_concurrency.lockutils [req-a3d54241-636c-4e21-8e22-bfcabb85bfc7 req-93179389-6189-4a2d-9be8-7e26d6be12d3 service nova] Lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.883252] env[61215]: DEBUG oslo_concurrency.lockutils [req-a3d54241-636c-4e21-8e22-bfcabb85bfc7 req-93179389-6189-4a2d-9be8-7e26d6be12d3 service nova] Lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.883423] env[61215]: DEBUG nova.compute.manager [req-a3d54241-636c-4e21-8e22-bfcabb85bfc7 req-93179389-6189-4a2d-9be8-7e26d6be12d3 service nova] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] No waiting events found dispatching network-vif-plugged-71fe011f-404c-4634-9211-4d53e88e4e40 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1743.883592] env[61215]: WARNING nova.compute.manager [req-a3d54241-636c-4e21-8e22-bfcabb85bfc7 req-93179389-6189-4a2d-9be8-7e26d6be12d3 service nova] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Received unexpected event network-vif-plugged-71fe011f-404c-4634-9211-4d53e88e4e40 for instance with vm_state building and task_state deleting. 
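
The records above show Neutron delivering network-vif-plugged-71fe011f-404c-4634-9211-4d53e88e4e40 while the instance already has task_state deleting: the handler takes the per-instance "-events" lock, finds no registered waiter, and logs the event as unexpected instead of failing. A minimal sketch of that register-then-pop pattern, using plain threading primitives; the class shape and names are simplified assumptions, not Nova's actual InstanceEvents implementation:

    import threading

    class InstanceEvents:
        # Simplified stand-in for the per-instance event table implied by
        # the "-events" lock records above.
        def __init__(self):
            self._lock = threading.Lock()
            self._events = {}   # {instance_uuid: {event_name: threading.Event}}

        def prepare(self, instance_uuid, event_name):
            # The build path registers interest *before* triggering the
            # action (plugging the VIF) that will eventually emit the event.
            with self._lock:
                evt = threading.Event()
                self._events.setdefault(instance_uuid, {})[event_name] = evt
                return evt

        def pop(self, instance_uuid, event_name):
            # The external-event handler pops the waiter when Neutron
            # reports the event; popping under the lock keeps delivery
            # and cancellation race-free.
            with self._lock:
                return self._events.get(instance_uuid, {}).pop(event_name, None)

    def deliver(events, instance_uuid, event_name):
        waiter = events.pop(instance_uuid, event_name)
        if waiter is None:
            # No one registered (e.g. the instance is already deleting):
            # log-and-continue, which is the WARNING seen in the log.
            print('unexpected event %s for %s' % (event_name, instance_uuid))
        else:
            waiter.set()   # wakes whoever is blocked in evt.wait(timeout)

Popping under the lock matters because delivery and teardown race: whichever side wins either wakes the waiter exactly once or finds nothing to wake.
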
[ 1744.013483] env[61215]: DEBUG nova.network.neutron [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Successfully updated port: 71fe011f-404c-4634-9211-4d53e88e4e40 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1744.032751] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquiring lock "refresh_cache-a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.032906] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquired lock "refresh_cache-a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.033077] env[61215]: DEBUG nova.network.neutron [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1744.087246] env[61215]: DEBUG nova.network.neutron [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1744.252778] env[61215]: DEBUG nova.network.neutron [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Updating instance_info_cache with network_info: [{"id": "71fe011f-404c-4634-9211-4d53e88e4e40", "address": "fa:16:3e:67:cc:26", "network": {"id": "7b3ee516-1bf4-4f5f-b757-838189d69d44", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-557895829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f31b73e1640842478b71905fe6e586ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d47d5e1d-e66d-4f2c-83e6-d5e78c2b767d", "external-id": "nsx-vlan-transportzone-109", "segmentation_id": 109, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71fe011f-40", "ovs_interfaceid": "71fe011f-404c-4634-9211-4d53e88e4e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.263732] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Releasing lock "refresh_cache-a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.264028] env[61215]: DEBUG nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Instance network_info: |[{"id": "71fe011f-404c-4634-9211-4d53e88e4e40", "address": "fa:16:3e:67:cc:26", "network": {"id": "7b3ee516-1bf4-4f5f-b757-838189d69d44", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-557895829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f31b73e1640842478b71905fe6e586ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d47d5e1d-e66d-4f2c-83e6-d5e78c2b767d", "external-id": "nsx-vlan-transportzone-109", "segmentation_id": 109, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71fe011f-40", "ovs_interfaceid": "71fe011f-404c-4634-9211-4d53e88e4e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1744.264454] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:cc:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd47d5e1d-e66d-4f2c-83e6-d5e78c2b767d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71fe011f-404c-4634-9211-4d53e88e4e40', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1744.272101] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Creating folder: Project (f31b73e1640842478b71905fe6e586ae). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1744.272651] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-25c32f5d-4e30-49bb-ab82-176871d33ab8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.283919] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Created folder: Project (f31b73e1640842478b71905fe6e586ae) in parent group-v352463. [ 1744.284177] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Creating folder: Instances. Parent ref: group-v352527. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1744.284413] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc538771-94dd-4311-b15c-ec28a91da32e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.293379] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Created folder: Instances in parent group-v352527. [ 1744.293577] env[61215]: DEBUG oslo.service.loopingcall [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1744.293762] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1744.294135] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc9c675c-4e08-487f-b2c6-93dd6bbd1069 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.313309] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1744.313309] env[61215]: value = "task-1690363" [ 1744.313309] env[61215]: _type = "Task" [ 1744.313309] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.320740] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690363, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.823368] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690363, 'name': CreateVM_Task, 'duration_secs': 0.2958} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.823368] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1744.824064] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.824260] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.824579] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1744.824830] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19a3fede-64c9-4df2-8833-849d3bf8eba0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.829133] env[61215]: DEBUG oslo_vmware.api [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Waiting for the task: (returnval){ [ 1744.829133] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52362e51-d65b-1aac-267c-0d5be501a759" [ 1744.829133] env[61215]: _type = "Task" [ 1744.829133] env[61215]: 
} to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.836567] env[61215]: DEBUG oslo_vmware.api [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52362e51-d65b-1aac-267c-0d5be501a759, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.339032] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.339296] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1745.339513] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1745.917514] env[61215]: DEBUG nova.compute.manager [req-212d77ca-895e-4dd2-8161-ad1ae780735a req-5a4c3ba7-fe15-4c80-ad27-e66a21ee00e4 service nova] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Received event network-changed-71fe011f-404c-4634-9211-4d53e88e4e40 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1745.917795] env[61215]: DEBUG nova.compute.manager [req-212d77ca-895e-4dd2-8161-ad1ae780735a req-5a4c3ba7-fe15-4c80-ad27-e66a21ee00e4 service nova] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Refreshing instance network info cache due to event network-changed-71fe011f-404c-4634-9211-4d53e88e4e40. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1745.918069] env[61215]: DEBUG oslo_concurrency.lockutils [req-212d77ca-895e-4dd2-8161-ad1ae780735a req-5a4c3ba7-fe15-4c80-ad27-e66a21ee00e4 service nova] Acquiring lock "refresh_cache-a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1745.918249] env[61215]: DEBUG oslo_concurrency.lockutils [req-212d77ca-895e-4dd2-8161-ad1ae780735a req-5a4c3ba7-fe15-4c80-ad27-e66a21ee00e4 service nova] Acquired lock "refresh_cache-a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1745.918429] env[61215]: DEBUG nova.network.neutron [req-212d77ca-895e-4dd2-8161-ad1ae780735a req-5a4c3ba7-fe15-4c80-ad27-e66a21ee00e4 service nova] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Refreshing network info cache for port 71fe011f-404c-4634-9211-4d53e88e4e40 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1746.226920] env[61215]: DEBUG nova.network.neutron [req-212d77ca-895e-4dd2-8161-ad1ae780735a req-5a4c3ba7-fe15-4c80-ad27-e66a21ee00e4 service nova] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Updated VIF entry in instance network info cache for port 71fe011f-404c-4634-9211-4d53e88e4e40. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1746.227320] env[61215]: DEBUG nova.network.neutron [req-212d77ca-895e-4dd2-8161-ad1ae780735a req-5a4c3ba7-fe15-4c80-ad27-e66a21ee00e4 service nova] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Updating instance_info_cache with network_info: [{"id": "71fe011f-404c-4634-9211-4d53e88e4e40", "address": "fa:16:3e:67:cc:26", "network": {"id": "7b3ee516-1bf4-4f5f-b757-838189d69d44", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-557895829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f31b73e1640842478b71905fe6e586ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d47d5e1d-e66d-4f2c-83e6-d5e78c2b767d", "external-id": "nsx-vlan-transportzone-109", "segmentation_id": 109, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71fe011f-40", "ovs_interfaceid": "71fe011f-404c-4634-9211-4d53e88e4e40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.237024] env[61215]: DEBUG oslo_concurrency.lockutils [req-212d77ca-895e-4dd2-8161-ad1ae780735a req-5a4c3ba7-fe15-4c80-ad27-e66a21ee00e4 service nova] Releasing lock "refresh_cache-a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.517408] env[61215]: WARNING oslo_vmware.rw_handles [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] 
Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1788.517408] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1788.517408] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1788.517408] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1788.517408] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1788.517408] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 1788.517408] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1788.517408] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1788.517408] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1788.517408] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1788.517408] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1788.517408] env[61215]: ERROR oslo_vmware.rw_handles [ 1788.518032] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/604fc32e-af8c-4397-b0d9-288d4b4e752d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1788.519761] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1788.520077] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Copying Virtual Disk [datastore1] vmware_temp/604fc32e-af8c-4397-b0d9-288d4b4e752d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/604fc32e-af8c-4397-b0d9-288d4b4e752d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1788.520389] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51acb3a2-22aa-4706-a02c-e436342d83c9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.529123] env[61215]: DEBUG oslo_vmware.api [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Waiting for the task: (returnval){ [ 1788.529123] env[61215]: value = "task-1690364" [ 1788.529123] env[61215]: _type = "Task" [ 1788.529123] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.537436] env[61215]: DEBUG oslo_vmware.api [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Task: {'id': task-1690364, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.039569] env[61215]: DEBUG oslo_vmware.exceptions [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1789.039878] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.040508] env[61215]: ERROR nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1789.040508] env[61215]: Faults: ['InvalidArgument'] [ 1789.040508] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Traceback (most recent call last): [ 1789.040508] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1789.040508] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] yield resources [ 1789.040508] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1789.040508] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] self.driver.spawn(context, instance, image_meta, [ 1789.040508] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1789.040508] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1789.040508] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1789.040508] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] self._fetch_image_if_missing(context, vi) [ 1789.040508] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1789.040861] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] image_cache(vi, tmp_image_ds_loc) [ 1789.040861] env[61215]: ERROR nova.compute.manager [instance: 
0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1789.040861] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] vm_util.copy_virtual_disk( [ 1789.040861] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1789.040861] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] session._wait_for_task(vmdk_copy_task) [ 1789.040861] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1789.040861] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] return self.wait_for_task(task_ref) [ 1789.040861] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1789.040861] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] return evt.wait() [ 1789.040861] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1789.040861] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] result = hub.switch() [ 1789.040861] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1789.040861] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] return self.greenlet.switch() [ 1789.041279] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1789.041279] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] self.f(*self.args, **self.kw) [ 1789.041279] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1789.041279] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] raise exceptions.translate_fault(task_info.error) [ 1789.041279] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1789.041279] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Faults: ['InvalidArgument'] [ 1789.041279] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] [ 1789.041279] env[61215]: INFO nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Terminating instance [ 1789.042419] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.042632] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1789.042866] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-daa99ded-05df-4a89-8a43-11d812f6df03 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.045094] env[61215]: DEBUG nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1789.045296] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1789.046041] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02efa1a2-13c5-4f3d-8276-d3cd8be24b8d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.053060] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1789.053304] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35c364a2-89c9-4c3d-b70f-7097d8f2dcaa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.055711] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1789.055888] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1789.056861] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df2bbca4-c0cb-4649-98c7-61bf7135fe97 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.061688] env[61215]: DEBUG oslo_vmware.api [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Waiting for the task: (returnval){ [ 1789.061688] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52afc197-fc00-676f-0678-f6de27c07539" [ 1789.061688] env[61215]: _type = "Task" [ 1789.061688] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.073231] env[61215]: DEBUG oslo_vmware.api [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52afc197-fc00-676f-0678-f6de27c07539, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.132152] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1789.132454] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1789.132681] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Deleting the datastore file [datastore1] 0223d7b6-12e1-4418-97f2-012ed41daa7a {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1789.132969] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26622362-c135-4b6a-8eb6-7a8d9e365cbc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.139266] env[61215]: DEBUG oslo_vmware.api [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Waiting for the task: (returnval){ [ 1789.139266] env[61215]: value = "task-1690366" [ 1789.139266] env[61215]: _type = "Task" [ 1789.139266] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.147022] env[61215]: DEBUG oslo_vmware.api [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Task: {'id': task-1690366, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.571944] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1789.572238] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Creating directory with path [datastore1] vmware_temp/3762a0e8-cdfa-4d77-ae93-b432854d7548/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1789.572460] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e49cb544-60bf-408b-b03a-f9c5d67f0b5a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.583459] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Created directory with path [datastore1] vmware_temp/3762a0e8-cdfa-4d77-ae93-b432854d7548/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1789.583654] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Fetch image to [datastore1] vmware_temp/3762a0e8-cdfa-4d77-ae93-b432854d7548/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1789.583826] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/3762a0e8-cdfa-4d77-ae93-b432854d7548/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1789.584563] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54f58e3-f151-4f7e-8f7b-a66b43daf95d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.591162] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7df1ab5-eb1d-48fb-abe8-27be4dcc1686 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.600138] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4582821-7540-4442-9fba-c65777b592eb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.631359] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-503d7faf-a316-4488-8c82-f300268143e0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.637075] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b2c64592-2b39-4bf9-a1b6-7da14f8c869e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.647418] env[61215]: DEBUG oslo_vmware.api [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Task: {'id': task-1690366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076019} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.647656] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1789.647841] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1789.648066] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1789.648219] env[61215]: INFO nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Took 0.60 seconds to destroy the instance on the hypervisor. 
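
Two task lifecycles are interleaved above: CopyVirtualDisk_Task for req-f64fbe7d fails with VimFaultException ("A specified parameter was not correct: fileType", faults ['InvalidArgument']), while DeleteDatastoreFile_Task for the same instance is polled from "progress is 0%" to "completed successfully". Both follow the same wait_for_task/_poll_task loop. A rough sketch of that loop, assuming a hypothetical get_task_info callable in place of the real PropertyCollector read of the Task managed object:

    import time

    class TaskFailed(Exception):
        # Stand-in for oslo_vmware.exceptions.VimFaultException.
        pass

    def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
        # get_task_info is a hypothetical callable returning an object with
        # .state ('queued'|'running'|'success'|'error') and .error; the real
        # code reads these fields from the vCenter Task object instead.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == 'success':
                return info            # the "completed successfully" record
            if info.state == 'error':
                # Translation step: a known fault name maps to a typed
                # exception; anything unmatched falls back to a generic
                # fault exception.
                raise TaskFailed(info.error)
            time.sleep(interval)       # the "... progress is 0%" records
        raise TimeoutError('task did not complete within %s s' % timeout)

The earlier "Fault InvalidArgument not matched" record appears to be exactly that translation step taking its fallback path to the generic VimFaultException.
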
[ 1789.650368] env[61215]: DEBUG nova.compute.claims [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1789.650628] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.650877] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.658775] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1789.714984] env[61215]: DEBUG oslo_vmware.rw_handles [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3762a0e8-cdfa-4d77-ae93-b432854d7548/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1789.779935] env[61215]: DEBUG oslo_vmware.rw_handles [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1789.780239] env[61215]: DEBUG oslo_vmware.rw_handles [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3762a0e8-cdfa-4d77-ae93-b432854d7548/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1790.042764] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a268f9-91e9-4699-bee1-02e70b651b11 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.050437] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da3824a-41de-450f-a30f-54e177cf9444 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.081689] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1957dac8-9a85-44fc-99e4-df2cc483c6ba {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.089379] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d7e9ce-14ec-4ac2-abf7-b8921d9267f7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.103650] env[61215]: DEBUG nova.compute.provider_tree [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1790.113234] env[61215]: DEBUG nova.scheduler.client.report [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1790.129710] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.479s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.130320] env[61215]: ERROR nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1790.130320] env[61215]: Faults: ['InvalidArgument'] [ 1790.130320] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Traceback (most recent call last): [ 1790.130320] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1790.130320] 
env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] self.driver.spawn(context, instance, image_meta, [ 1790.130320] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1790.130320] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1790.130320] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1790.130320] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] self._fetch_image_if_missing(context, vi) [ 1790.130320] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1790.130320] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] image_cache(vi, tmp_image_ds_loc) [ 1790.130320] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1790.130699] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] vm_util.copy_virtual_disk( [ 1790.130699] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1790.130699] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] session._wait_for_task(vmdk_copy_task) [ 1790.130699] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1790.130699] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] return self.wait_for_task(task_ref) [ 1790.130699] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1790.130699] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] return evt.wait() [ 1790.130699] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1790.130699] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] result = hub.switch() [ 1790.130699] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1790.130699] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] return self.greenlet.switch() [ 1790.130699] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1790.130699] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] self.f(*self.args, **self.kw) [ 1790.131112] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1790.131112] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] raise exceptions.translate_fault(task_info.error) [ 1790.131112] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1790.131112] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Faults: ['InvalidArgument'] [ 1790.131112] env[61215]: ERROR nova.compute.manager [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] [ 1790.131112] env[61215]: DEBUG nova.compute.utils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1790.132569] env[61215]: DEBUG nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Build of instance 0223d7b6-12e1-4418-97f2-012ed41daa7a was re-scheduled: A specified parameter was not correct: fileType [ 1790.132569] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1790.132935] env[61215]: DEBUG nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1790.133129] env[61215]: DEBUG nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1790.133307] env[61215]: DEBUG nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1790.133474] env[61215]: DEBUG nova.network.neutron [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1790.488655] env[61215]: DEBUG nova.network.neutron [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.499048] env[61215]: INFO nova.compute.manager [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Took 0.37 seconds to deallocate network for instance. [ 1790.604174] env[61215]: INFO nova.scheduler.client.report [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Deleted allocations for instance 0223d7b6-12e1-4418-97f2-012ed41daa7a [ 1790.633189] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f64fbe7d-073a-4d07-a386-ae262544859d tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Lock "0223d7b6-12e1-4418-97f2-012ed41daa7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 509.930s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.634454] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e3e5258b-7348-455a-a3a9-a84636ea497b tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Lock "0223d7b6-12e1-4418-97f2-012ed41daa7a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 310.351s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.634675] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e3e5258b-7348-455a-a3a9-a84636ea497b tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Acquiring lock "0223d7b6-12e1-4418-97f2-012ed41daa7a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.634877] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e3e5258b-7348-455a-a3a9-a84636ea497b tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Lock "0223d7b6-12e1-4418-97f2-012ed41daa7a-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.635082] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e3e5258b-7348-455a-a3a9-a84636ea497b tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Lock "0223d7b6-12e1-4418-97f2-012ed41daa7a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.637158] env[61215]: INFO nova.compute.manager [None req-e3e5258b-7348-455a-a3a9-a84636ea497b tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Terminating instance [ 1790.638910] env[61215]: DEBUG nova.compute.manager [None req-e3e5258b-7348-455a-a3a9-a84636ea497b tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1790.639126] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e5258b-7348-455a-a3a9-a84636ea497b tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1790.640033] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d29d9af9-7bb1-4389-b57c-12ca23487086 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.649034] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b208df-0956-4136-b83e-02d24084447f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.659445] env[61215]: DEBUG nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1790.679602] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-e3e5258b-7348-455a-a3a9-a84636ea497b tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0223d7b6-12e1-4418-97f2-012ed41daa7a could not be found. 
[ 1790.679817] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-e3e5258b-7348-455a-a3a9-a84636ea497b tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1790.680009] env[61215]: INFO nova.compute.manager [None req-e3e5258b-7348-455a-a3a9-a84636ea497b tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1790.680792] env[61215]: DEBUG oslo.service.loopingcall [None req-e3e5258b-7348-455a-a3a9-a84636ea497b tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1790.680792] env[61215]: DEBUG nova.compute.manager [-] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1790.680792] env[61215]: DEBUG nova.network.neutron [-] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1790.705783] env[61215]: DEBUG nova.network.neutron [-] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.713098] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1790.713360] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1790.715201] env[61215]: INFO nova.compute.claims [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1790.718371] env[61215]: INFO nova.compute.manager [-] [instance: 0223d7b6-12e1-4418-97f2-012ed41daa7a] Took 0.04 seconds to deallocate network for instance. 
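[editor's note] The WARNING followed immediately by "Instance destroyed" shows the destroy path treating a VM that is already absent from the backend as successfully destroyed, which is what lets this terminate (racing a build that never produced a VM) still fall through to network deallocation. A minimal sketch of that tolerance pattern, with a stand-in exception and hypothetical session helpers (not the real vmops code):

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy_vm(session, instance_uuid):
        # Best-effort destroy: a VM missing on the backend counts as destroyed.
        try:
            vm_ref = session.find_vm_by_uuid(instance_uuid)  # hypothetical lookup
        except InstanceNotFound:
            # Matches the WARNING above: nothing to tear down, so return as
            # if the destroy succeeded and let cleanup continue.
            return
        session.unregister_and_delete_vm(vm_ref)  # hypothetical teardown call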
[ 1790.818454] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e3e5258b-7348-455a-a3a9-a84636ea497b tempest-ImagesOneServerTestJSON-1339418439 tempest-ImagesOneServerTestJSON-1339418439-project-member] Lock "0223d7b6-12e1-4418-97f2-012ed41daa7a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.184s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.035768] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10be92e3-1190-4aa5-815e-8338be45f634 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.043372] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0ff410-dc67-4ee2-b426-0752a89e9257 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.072024] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5eaa77-42d9-494b-8b82-5a8e0776c4f3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.078935] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c9e540-6f7f-475f-9d55-f1e41660aa38 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.091653] env[61215]: DEBUG nova.compute.provider_tree [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1791.100252] env[61215]: DEBUG nova.scheduler.client.report [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1791.114028] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.401s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.114493] env[61215]: DEBUG nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1791.146026] env[61215]: DEBUG nova.compute.utils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1791.147058] env[61215]: DEBUG nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1791.147058] env[61215]: DEBUG nova.network.neutron [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1791.161427] env[61215]: DEBUG nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1791.209582] env[61215]: DEBUG nova.policy [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8463e1f0415f4536a8e2a5fe315e1dca', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44797a7f59634ea692e532631427af87', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1791.227258] env[61215]: DEBUG nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1791.256544] env[61215]: DEBUG nova.virt.hardware [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1791.256773] env[61215]: DEBUG nova.virt.hardware [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1791.256926] env[61215]: DEBUG nova.virt.hardware [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1791.257135] env[61215]: DEBUG nova.virt.hardware [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1791.257287] env[61215]: DEBUG nova.virt.hardware [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1791.257431] env[61215]: DEBUG nova.virt.hardware [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1791.257633] env[61215]: DEBUG nova.virt.hardware [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1791.257789] env[61215]: DEBUG nova.virt.hardware [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1791.257997] env[61215]: DEBUG 
nova.virt.hardware [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1791.258374] env[61215]: DEBUG nova.virt.hardware [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1791.258554] env[61215]: DEBUG nova.virt.hardware [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1791.259463] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865ae110-be5a-4d80-81f2-752b139f72b3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.268871] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665a77ef-696a-4340-b271-2b009c04e14a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.574295] env[61215]: DEBUG nova.network.neutron [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Successfully created port: 72b246c6-8f4e-4614-a716-f9ae45b2b757 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1792.411230] env[61215]: DEBUG nova.network.neutron [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Successfully updated port: 72b246c6-8f4e-4614-a716-f9ae45b2b757 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1792.429272] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Acquiring lock "refresh_cache-c233ab81-232d-49be-a176-bf846f0d8cc3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.429491] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Acquired lock "refresh_cache-c233ab81-232d-49be-a176-bf846f0d8cc3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.429570] env[61215]: DEBUG nova.network.neutron [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1792.497492] env[61215]: DEBUG nova.network.neutron [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 
tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1792.561937] env[61215]: DEBUG nova.compute.manager [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Received event network-vif-plugged-72b246c6-8f4e-4614-a716-f9ae45b2b757 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1792.562133] env[61215]: DEBUG oslo_concurrency.lockutils [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] Acquiring lock "c233ab81-232d-49be-a176-bf846f0d8cc3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.562356] env[61215]: DEBUG oslo_concurrency.lockutils [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] Lock "c233ab81-232d-49be-a176-bf846f0d8cc3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1792.562823] env[61215]: DEBUG oslo_concurrency.lockutils [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] Lock "c233ab81-232d-49be-a176-bf846f0d8cc3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.563074] env[61215]: DEBUG nova.compute.manager [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] No waiting events found dispatching network-vif-plugged-72b246c6-8f4e-4614-a716-f9ae45b2b757 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1792.563262] env[61215]: WARNING nova.compute.manager [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Received unexpected event network-vif-plugged-72b246c6-8f4e-4614-a716-f9ae45b2b757 for instance with vm_state building and task_state spawning. [ 1792.563696] env[61215]: DEBUG nova.compute.manager [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Received event network-changed-72b246c6-8f4e-4614-a716-f9ae45b2b757 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1792.563696] env[61215]: DEBUG nova.compute.manager [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Refreshing instance network info cache due to event network-changed-72b246c6-8f4e-4614-a716-f9ae45b2b757. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1792.563866] env[61215]: DEBUG oslo_concurrency.lockutils [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] Acquiring lock "refresh_cache-c233ab81-232d-49be-a176-bf846f0d8cc3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.737083] env[61215]: DEBUG nova.network.neutron [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Updating instance_info_cache with network_info: [{"id": "72b246c6-8f4e-4614-a716-f9ae45b2b757", "address": "fa:16:3e:96:fb:99", "network": {"id": "c7e2c78f-5d99-48bd-bb1f-f51c684a91b2", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-264815476-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44797a7f59634ea692e532631427af87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72b246c6-8f", "ovs_interfaceid": "72b246c6-8f4e-4614-a716-f9ae45b2b757", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.754023] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Releasing lock "refresh_cache-c233ab81-232d-49be-a176-bf846f0d8cc3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.754023] env[61215]: DEBUG nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Instance network_info: |[{"id": "72b246c6-8f4e-4614-a716-f9ae45b2b757", "address": "fa:16:3e:96:fb:99", "network": {"id": "c7e2c78f-5d99-48bd-bb1f-f51c684a91b2", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-264815476-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44797a7f59634ea692e532631427af87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap72b246c6-8f", "ovs_interfaceid": "72b246c6-8f4e-4614-a716-f9ae45b2b757", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1792.754363] env[61215]: DEBUG oslo_concurrency.lockutils [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] Acquired lock "refresh_cache-c233ab81-232d-49be-a176-bf846f0d8cc3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.754551] env[61215]: DEBUG nova.network.neutron [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Refreshing network info cache for port 72b246c6-8f4e-4614-a716-f9ae45b2b757 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1792.755998] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:fb:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f096917-a0cf-4add-a9d2-23ca1c723b3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72b246c6-8f4e-4614-a716-f9ae45b2b757', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1792.764782] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Creating folder: Project (44797a7f59634ea692e532631427af87). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1792.765624] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-536090fc-f1ef-4edb-91d1-404d07a0f701 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.780435] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Created folder: Project (44797a7f59634ea692e532631427af87) in parent group-v352463. [ 1792.780863] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Creating folder: Instances. Parent ref: group-v352530. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1792.780863] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1178a7bb-d2b6-4811-bbd1-ea45f5420f97 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.792651] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Created folder: Instances in parent group-v352530. 
[ 1792.792913] env[61215]: DEBUG oslo.service.loopingcall [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1792.793126] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1792.793342] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e6960312-bc75-4af9-8b2b-dc34508425e4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.813973] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1792.813973] env[61215]: value = "task-1690369" [ 1792.813973] env[61215]: _type = "Task" [ 1792.813973] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.822283] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690369, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.078193] env[61215]: DEBUG nova.network.neutron [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Updated VIF entry in instance network info cache for port 72b246c6-8f4e-4614-a716-f9ae45b2b757. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1793.078871] env[61215]: DEBUG nova.network.neutron [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Updating instance_info_cache with network_info: [{"id": "72b246c6-8f4e-4614-a716-f9ae45b2b757", "address": "fa:16:3e:96:fb:99", "network": {"id": "c7e2c78f-5d99-48bd-bb1f-f51c684a91b2", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-264815476-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "44797a7f59634ea692e532631427af87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72b246c6-8f", "ovs_interfaceid": "72b246c6-8f4e-4614-a716-f9ae45b2b757", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1793.089562] env[61215]: DEBUG oslo_concurrency.lockutils [req-23e60c34-d004-4a09-a892-4c3769758d82 req-98ba33db-bed2-40c4-85ca-b36f676e18a9 service nova] Releasing lock "refresh_cache-c233ab81-232d-49be-a176-bf846f0d8cc3" {{(pid=61215) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.323964] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690369, 'name': CreateVM_Task, 'duration_secs': 0.344611} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.324238] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1793.324850] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.325178] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.325358] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1793.325636] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b067072-4739-4553-9fc9-700c3502381e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.330565] env[61215]: DEBUG oslo_vmware.api [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Waiting for the task: (returnval){ [ 1793.330565] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5231dcf5-2b47-226e-2b1f-cb8284aa7018" [ 1793.330565] env[61215]: _type = "Task" [ 1793.330565] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.339222] env[61215]: DEBUG oslo_vmware.api [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5231dcf5-2b47-226e-2b1f-cb8284aa7018, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.841704] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.841704] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1793.841704] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.767584] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1794.767896] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1795.653586] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1795.653873] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1796.650367] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1796.676771] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1796.677045] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1796.677119] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1796.698991] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1796.698991] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1796.698991] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1796.699175] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1796.699294] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1796.699483] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1796.699622] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1796.699772] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1796.699908] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1796.700039] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1796.700176] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1796.700629] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1796.700777] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1796.783255] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Acquiring lock "0d609df2-621c-456f-b8ce-a209e9052153" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.783547] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Lock "0d609df2-621c-456f-b8ce-a209e9052153" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.654760] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1798.654322] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1798.906372] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d3b902a4-d4d7-4d6e-88a3-44a06ffa516e tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Acquiring lock "c233ab81-232d-49be-a176-bf846f0d8cc3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.654373] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1800.665063] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.665296] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.665465] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.665623] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1800.666730] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55819a1a-1383-47b3-9e68-fa7d09d6d0d3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.675451] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a403679-aeab-4abc-8785-35363d70a0e9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.689394] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f926e3b-17f9-4c6d-902f-f7c06bf81d10 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.695693] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c2001c-3c6c-473f-b13a-b22afce55429 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.725616] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181321MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1800.725770] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.725958] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.880034] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1800.880151] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1800.880394] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d49f702b-cd29-4491-938c-0291b351ef20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1800.880580] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1800.880740] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1800.880892] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8d4665c7-67de-4ab3-a8b7-596a5e1152ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1800.881071] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1800.881232] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 2e186217-c1e1-40c6-8d84-988f35f6b93d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1800.881402] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1800.881552] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c233ab81-232d-49be-a176-bf846f0d8cc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1800.893615] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bb56c470-9f85-44b1-b1ec-f44236e9de51 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1800.904162] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d0a5229f-8da2-40bb-af99-28f32923892f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1800.914925] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ddaa12c8-88c6-4ba0-beec-cad92acd9768 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1800.924151] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a17827ee-8ad2-459b-ba7d-f9f9be429e64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1800.933012] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 96a536ea-a1c7-470e-8873-bc1e723efefa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1800.943194] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 40fe7cd9-7c99-4add-a2eb-429ff2aba7a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1800.954066] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e9369a71-fc94-4cdd-82c6-6308783581c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1800.964418] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 38fe96cf-e570-4c0e-af6f-2f199efc06ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1800.974146] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f91efd4b-851e-44bc-9cf2-7be8a2d2d7df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1800.983685] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0576d0b5-3890-4e1d-b208-40d46c2fdae7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1800.993127] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bf807d62-c8be-4819-9fc1-4b2d6d14cc39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1801.002279] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 12825ddf-86ee-4500-b43b-cf480dc54f3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1801.010849] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a42577f4-29ba-446b-a561-745ff14d1696 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1801.019388] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e4cf3e92-a1a6-47ac-8625-37cdbf96cb35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1801.027660] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0d609df2-621c-456f-b8ce-a209e9052153 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1801.027886] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1801.028051] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1801.043741] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing inventories for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1801.058156] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating ProviderTree inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1801.058398] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1801.070495] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing 
aggregate associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, aggregates: None {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1801.088648] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing trait associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1801.383701] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54ca416-8a68-4805-b060-0e04800c5ac2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.391513] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3836ec39-8af5-40df-9fb4-40731aa7a916 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.421661] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e799f704-4c8b-4ba1-b462-402f131cfb1b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.428944] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e1f186-c3e9-48cf-a35a-900f1761c993 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.441790] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1801.449871] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1801.464728] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1801.464915] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.739s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.465161] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61215) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1801.465305] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11220}} [ 1801.473612] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] There are 0 instances to clean {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1801.473820] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1803.660282] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1803.660571] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61215) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11258}} [ 1832.921775] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1832.945258] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Getting list of instances from cluster (obj){ [ 1832.945258] env[61215]: value = "domain-c8" [ 1832.945258] env[61215]: _type = "ClusterComputeResource" [ 1832.945258] env[61215]: } {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1832.946881] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d47327d-9355-479c-b6cc-9c7e071d7614 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.964151] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Got total of 10 instances {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1832.964328] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1832.964534] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1832.964700] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid d49f702b-cd29-4491-938c-0291b351ef20 {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1832.964858] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 {{(pid=61215) 
_sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1832.965060] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1832.965249] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid 8d4665c7-67de-4ab3-a8b7-596a5e1152ce {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1832.965407] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1832.965556] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid 2e186217-c1e1-40c6-8d84-988f35f6b93d {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1832.965706] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1832.965854] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid c233ab81-232d-49be-a176-bf846f0d8cc3 {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 1832.966186] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.966463] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.966679] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "d49f702b-cd29-4491-938c-0291b351ef20" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.966884] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.967101] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.967311] env[61215]: DEBUG 
oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.967505] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.967732] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "2e186217-c1e1-40c6-8d84-988f35f6b93d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.967937] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.968155] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "c233ab81-232d-49be-a176-bf846f0d8cc3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.405608] env[61215]: WARNING oslo_vmware.rw_handles [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1839.405608] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1839.405608] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1839.405608] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1839.405608] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1839.405608] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 1839.405608] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1839.405608] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1839.405608] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1839.405608] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1839.405608] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1839.405608] env[61215]: ERROR oslo_vmware.rw_handles [ 1839.406305] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 
tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/3762a0e8-cdfa-4d77-ae93-b432854d7548/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1839.407932] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1839.408638] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Copying Virtual Disk [datastore1] vmware_temp/3762a0e8-cdfa-4d77-ae93-b432854d7548/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/3762a0e8-cdfa-4d77-ae93-b432854d7548/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1839.408638] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b470f83d-9853-4112-bba9-a64309caedf9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.417082] env[61215]: DEBUG oslo_vmware.api [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Waiting for the task: (returnval){ [ 1839.417082] env[61215]: value = "task-1690370" [ 1839.417082] env[61215]: _type = "Task" [ 1839.417082] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.424739] env[61215]: DEBUG oslo_vmware.api [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Task: {'id': task-1690370, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.927663] env[61215]: DEBUG oslo_vmware.exceptions [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Fault InvalidArgument not matched. 
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1839.927996] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.928601] env[61215]: ERROR nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1839.928601] env[61215]: Faults: ['InvalidArgument'] [ 1839.928601] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Traceback (most recent call last): [ 1839.928601] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1839.928601] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] yield resources [ 1839.928601] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1839.928601] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] self.driver.spawn(context, instance, image_meta, [ 1839.928601] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1839.928601] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1839.928601] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1839.928601] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] self._fetch_image_if_missing(context, vi) [ 1839.928601] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1839.928995] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] image_cache(vi, tmp_image_ds_loc) [ 1839.928995] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1839.928995] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] vm_util.copy_virtual_disk( [ 1839.928995] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1839.928995] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] session._wait_for_task(vmdk_copy_task) [ 1839.928995] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1839.928995] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] return self.wait_for_task(task_ref) [ 1839.928995] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1839.928995] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] return evt.wait() [ 1839.928995] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1839.928995] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] result = hub.switch() [ 1839.928995] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1839.928995] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] return self.greenlet.switch() [ 1839.929397] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1839.929397] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] self.f(*self.args, **self.kw) [ 1839.929397] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1839.929397] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] raise exceptions.translate_fault(task_info.error) [ 1839.929397] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1839.929397] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Faults: ['InvalidArgument'] [ 1839.929397] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] [ 1839.929397] env[61215]: INFO nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Terminating instance [ 1839.930512] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.930745] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1839.930982] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-0c1d1851-4be0-4226-b6ef-ebeadc9618f4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.933243] env[61215]: DEBUG nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1839.933441] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1839.934182] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580aa1a2-5880-40d9-bb6e-498995e9e547 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.941286] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1839.941515] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d36623eb-7bb0-457c-b735-778831f2f5b2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.943720] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1839.943895] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1839.944875] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5ab6801-5402-4869-a596-daf2ef4b5255 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.950895] env[61215]: DEBUG oslo_vmware.api [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Waiting for the task: (returnval){ [ 1839.950895] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e5a50d-df5a-c83a-2232-cd689815533c" [ 1839.950895] env[61215]: _type = "Task" [ 1839.950895] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.965458] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1839.965694] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Creating directory with path [datastore1] vmware_temp/e9eaf53d-7a4b-4342-9316-f63d14708923/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1839.965913] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d75fa87-e9b2-4dc3-918a-d1075eda37d8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.976517] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Created directory with path [datastore1] vmware_temp/e9eaf53d-7a4b-4342-9316-f63d14708923/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1839.976722] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Fetch image to [datastore1] vmware_temp/e9eaf53d-7a4b-4342-9316-f63d14708923/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1839.976896] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/e9eaf53d-7a4b-4342-9316-f63d14708923/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1839.977702] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4810ec0e-a7f7-4f02-89b0-13a7754a6d18 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.984860] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f3c41d-8e4a-40ba-b391-5a103bff9dee {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.994504] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4773e728-678a-4489-8b89-a6e8d528b9ea {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.026373] env[61215]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62b484f-1b6f-4b98-82c0-f680fcae5df6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.029037] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1840.029248] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1840.029426] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Deleting the datastore file [datastore1] 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1840.029653] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f890cc1b-cc62-4fa8-b014-5cf2c4f3a14a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.035053] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-839210d3-f91d-4c34-a97e-06985eb7da06 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.037133] env[61215]: DEBUG oslo_vmware.api [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Waiting for the task: (returnval){ [ 1840.037133] env[61215]: value = "task-1690372" [ 1840.037133] env[61215]: _type = "Task" [ 1840.037133] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.044825] env[61215]: DEBUG oslo_vmware.api [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Task: {'id': task-1690372, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.058756] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1840.113383] env[61215]: DEBUG oslo_vmware.rw_handles [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9eaf53d-7a4b-4342-9316-f63d14708923/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1840.175594] env[61215]: DEBUG oslo_vmware.rw_handles [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1840.175794] env[61215]: DEBUG oslo_vmware.rw_handles [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e9eaf53d-7a4b-4342-9316-f63d14708923/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1840.548103] env[61215]: DEBUG oslo_vmware.api [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Task: {'id': task-1690372, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073775} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.548396] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1840.548555] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1840.548751] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1840.548944] env[61215]: INFO nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1840.551064] env[61215]: DEBUG nova.compute.claims [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1840.551244] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.551468] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.890614] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d55b66-1fb9-4b98-9ff6-5667ae9fabd7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.898170] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0569dc-ca63-4fb4-9309-4a25c0b9d0fe {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.927044] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f75856b-fb2b-40af-b3e5-fc785c739d78 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.933810] env[61215]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23062327-8d2e-4e52-a002-a0679b582972 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.946412] env[61215]: DEBUG nova.compute.provider_tree [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1840.956653] env[61215]: DEBUG nova.scheduler.client.report [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1840.989022] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.437s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.989587] env[61215]: ERROR nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1840.989587] env[61215]: Faults: ['InvalidArgument'] [ 1840.989587] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Traceback (most recent call last): [ 1840.989587] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1840.989587] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] self.driver.spawn(context, instance, image_meta, [ 1840.989587] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1840.989587] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1840.989587] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1840.989587] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] self._fetch_image_if_missing(context, vi) [ 1840.989587] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1840.989587] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] image_cache(vi, tmp_image_ds_loc) [ 1840.989587] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1840.989996] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] vm_util.copy_virtual_disk( [ 1840.989996] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1840.989996] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] session._wait_for_task(vmdk_copy_task) [ 1840.989996] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1840.989996] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] return self.wait_for_task(task_ref) [ 1840.989996] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1840.989996] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] return evt.wait() [ 1840.989996] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1840.989996] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] result = hub.switch() [ 1840.989996] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1840.989996] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] return self.greenlet.switch() [ 1840.989996] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1840.989996] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] self.f(*self.args, **self.kw) [ 1840.990419] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1840.990419] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] raise exceptions.translate_fault(task_info.error) [ 1840.990419] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1840.990419] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Faults: ['InvalidArgument'] [ 1840.990419] env[61215]: ERROR nova.compute.manager [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] [ 1840.990419] env[61215]: DEBUG nova.compute.utils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] VimFaultException {{(pid=61215) 
notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1840.991974] env[61215]: DEBUG nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Build of instance 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d was re-scheduled: A specified parameter was not correct: fileType [ 1840.991974] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1840.992338] env[61215]: DEBUG nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1840.992521] env[61215]: DEBUG nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1840.992699] env[61215]: DEBUG nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1840.992930] env[61215]: DEBUG nova.network.neutron [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1841.304856] env[61215]: DEBUG nova.network.neutron [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.315585] env[61215]: INFO nova.compute.manager [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Took 0.32 seconds to deallocate network for instance. 
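The InvalidArgument failure above propagates through oslo.vmware's task polling: _poll_task raises exceptions.translate_fault(task_info.error), and get_fault_class logs "Fault InvalidArgument not matched" because no dedicated exception class is registered for that fault name, so the generic VimFaultException surfaces instead. A minimal sketch of catching it on the caller side (the session and task names here are illustrative, not Nova's actual code):

from oslo_vmware import exceptions as vexc

def wait_for_copy(session, task_ref):
    # session is assumed to be an oslo_vmware.api.VMwareAPISession and
    # task_ref a CopyVirtualDisk_Task reference, as in the trace above.
    try:
        return session.wait_for_task(task_ref)
    except vexc.VimFaultException as exc:
        # fault_list carries the raw vCenter fault names, e.g.
        # ['InvalidArgument'] for the fileType error seen here.
        if 'InvalidArgument' in exc.fault_list:
            raise RuntimeError('vCenter rejected a parameter: %s' % exc)
        raise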
[ 1841.406064] env[61215]: INFO nova.scheduler.client.report [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Deleted allocations for instance 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d [ 1841.425765] env[61215]: DEBUG oslo_concurrency.lockutils [None req-6cdfdf57-9e17-4441-8afd-156999eeb856 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 560.591s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.426858] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e2747d29-820f-4f72-8540-d12fdd6496d7 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 353.515s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.427098] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e2747d29-820f-4f72-8540-d12fdd6496d7 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Acquiring lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.427311] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e2747d29-820f-4f72-8540-d12fdd6496d7 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.427479] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e2747d29-820f-4f72-8540-d12fdd6496d7 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.429747] env[61215]: INFO nova.compute.manager [None req-e2747d29-820f-4f72-8540-d12fdd6496d7 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Terminating instance [ 1841.431149] env[61215]: DEBUG nova.compute.manager [None req-e2747d29-820f-4f72-8540-d12fdd6496d7 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1841.431345] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-e2747d29-820f-4f72-8540-d12fdd6496d7 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1841.431873] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa730297-5fb1-4381-a481-2ab70c904891 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.441231] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38add3c2-d19c-4941-9fe7-c5d326823619 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.452501] env[61215]: DEBUG nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1841.474161] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-e2747d29-820f-4f72-8540-d12fdd6496d7 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d could not be found. [ 1841.474876] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-e2747d29-820f-4f72-8540-d12fdd6496d7 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1841.475350] env[61215]: INFO nova.compute.manager [None req-e2747d29-820f-4f72-8540-d12fdd6496d7 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1841.475523] env[61215]: DEBUG oslo.service.loopingcall [None req-e2747d29-820f-4f72-8540-d12fdd6496d7 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1841.475821] env[61215]: DEBUG nova.compute.manager [-] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1841.475979] env[61215]: DEBUG nova.network.neutron [-] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1841.503331] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.503331] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.503482] env[61215]: INFO nova.compute.claims [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1841.514303] env[61215]: DEBUG nova.network.neutron [-] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.540035] env[61215]: INFO nova.compute.manager [-] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] Took 0.06 seconds to deallocate network for instance. [ 1841.651788] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e2747d29-820f-4f72-8540-d12fdd6496d7 tempest-ListServerFiltersTestJSON-701890871 tempest-ListServerFiltersTestJSON-701890871-project-member] Lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.225s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.652702] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 8.686s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.652843] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 85d35c2a-bdfe-400b-a90c-082b9ae4ce7d] During sync_power_state the instance has a pending task (deleting). Skip. 
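The "acquired ... waited Ns" and '"released" ... held Ns' DEBUG lines above (all pointing at oslo_concurrency/lockutils.py inner, lines 402/407/421) come from oslo.concurrency's lock wrapper, which times how long a caller waited for a named lock and how long it held it. A minimal sketch of that mechanism follows; the lock name is taken from the instance UUID in this trace for illustration, and the decorated function is a stand-in, not Nova's actual do_terminate_instance.

import time

from oslo_concurrency import lockutils

@lockutils.synchronized('85d35c2a-bdfe-400b-a90c-082b9ae4ce7d',
                        external=False)
def do_terminate_instance():
    # Runs only while the per-instance lock is held. The wrapper in
    # lockutils logs the "acquired ... waited"/'"released" ... held'
    # pairs seen in this log, so long waits (353.515s above) show up
    # directly in the DEBUG output.
    time.sleep(0.1)

do_terminate_instance()

This is why the trace shows a terminate request blocking for the full duration of a slow build: both code paths serialize on the same per-instance lock name.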
[ 1841.652997] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "85d35c2a-bdfe-400b-a90c-082b9ae4ce7d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.863738] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77b7340-e06c-4fe1-a27b-853f00bfdf20 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.872159] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d07f9d3-8977-46bf-af06-2ed1a81d754a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.902691] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e1e590-0d59-4030-a892-4f3ca53671ec {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.910289] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fcf92a0-0e45-4ff2-857e-3321c4240da6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.924528] env[61215]: DEBUG nova.compute.provider_tree [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1841.933223] env[61215]: DEBUG nova.scheduler.client.report [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1841.950929] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.449s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.951354] env[61215]: DEBUG nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1841.985354] env[61215]: DEBUG nova.compute.utils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1841.986957] env[61215]: DEBUG nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1841.987148] env[61215]: DEBUG nova.network.neutron [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1841.999016] env[61215]: DEBUG nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1842.067158] env[61215]: DEBUG nova.policy [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1cfa32e7efb40b88a7565fb525635bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33bc963e963d493f8bad1328351cd968', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1842.070431] env[61215]: DEBUG nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1842.095959] env[61215]: DEBUG nova.virt.hardware [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1842.096223] env[61215]: DEBUG nova.virt.hardware [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1842.096394] env[61215]: DEBUG nova.virt.hardware [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1842.096612] env[61215]: DEBUG nova.virt.hardware [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1842.096771] env[61215]: DEBUG nova.virt.hardware [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1842.096927] env[61215]: DEBUG nova.virt.hardware [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1842.097159] env[61215]: DEBUG nova.virt.hardware [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1842.097329] env[61215]: DEBUG nova.virt.hardware [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1842.097501] env[61215]: DEBUG nova.virt.hardware [None 
req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1842.097759] env[61215]: DEBUG nova.virt.hardware [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1842.097978] env[61215]: DEBUG nova.virt.hardware [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1842.098862] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c24a4ce-828b-4fad-b29c-4fc096f18283 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.106988] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75fd8509-8d60-4605-9aa8-b1b5544915c4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.389742] env[61215]: DEBUG nova.network.neutron [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Successfully created port: 3911b2de-be57-4735-af69-fac5973d8cb0 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1842.729637] env[61215]: DEBUG nova.network.neutron [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Successfully created port: 54c17a7e-00f1-483c-923f-a2ae3f199d1c {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1843.186852] env[61215]: DEBUG nova.network.neutron [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Successfully created port: d71139e5-827d-49b3-98d2-ba6f2187dc35 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1843.755206] env[61215]: DEBUG nova.compute.manager [req-fdaf6d32-406c-44b2-bf2d-bb2fb546365b req-e990c277-9b6d-4b24-927d-56a9e7397681 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Received event network-vif-plugged-3911b2de-be57-4735-af69-fac5973d8cb0 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1843.755431] env[61215]: DEBUG oslo_concurrency.lockutils [req-fdaf6d32-406c-44b2-bf2d-bb2fb546365b req-e990c277-9b6d-4b24-927d-56a9e7397681 service nova] Acquiring lock "bb56c470-9f85-44b1-b1ec-f44236e9de51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.755679] env[61215]: DEBUG oslo_concurrency.lockutils [req-fdaf6d32-406c-44b2-bf2d-bb2fb546365b 
req-e990c277-9b6d-4b24-927d-56a9e7397681 service nova] Lock "bb56c470-9f85-44b1-b1ec-f44236e9de51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.755886] env[61215]: DEBUG oslo_concurrency.lockutils [req-fdaf6d32-406c-44b2-bf2d-bb2fb546365b req-e990c277-9b6d-4b24-927d-56a9e7397681 service nova] Lock "bb56c470-9f85-44b1-b1ec-f44236e9de51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.756099] env[61215]: DEBUG nova.compute.manager [req-fdaf6d32-406c-44b2-bf2d-bb2fb546365b req-e990c277-9b6d-4b24-927d-56a9e7397681 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] No waiting events found dispatching network-vif-plugged-3911b2de-be57-4735-af69-fac5973d8cb0 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1843.756278] env[61215]: WARNING nova.compute.manager [req-fdaf6d32-406c-44b2-bf2d-bb2fb546365b req-e990c277-9b6d-4b24-927d-56a9e7397681 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Received unexpected event network-vif-plugged-3911b2de-be57-4735-af69-fac5973d8cb0 for instance with vm_state building and task_state spawning. [ 1844.076948] env[61215]: DEBUG nova.network.neutron [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Successfully updated port: 3911b2de-be57-4735-af69-fac5973d8cb0 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1844.844091] env[61215]: DEBUG nova.network.neutron [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Successfully updated port: 54c17a7e-00f1-483c-923f-a2ae3f199d1c {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1845.620132] env[61215]: DEBUG nova.network.neutron [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Successfully updated port: d71139e5-827d-49b3-98d2-ba6f2187dc35 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1845.627333] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "refresh_cache-bb56c470-9f85-44b1-b1ec-f44236e9de51" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.627472] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquired lock "refresh_cache-bb56c470-9f85-44b1-b1ec-f44236e9de51" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.627622] env[61215]: DEBUG nova.network.neutron [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 
bb56c470-9f85-44b1-b1ec-f44236e9de51] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1845.709796] env[61215]: DEBUG nova.network.neutron [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1845.783913] env[61215]: DEBUG nova.compute.manager [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Received event network-changed-3911b2de-be57-4735-af69-fac5973d8cb0 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1845.784153] env[61215]: DEBUG nova.compute.manager [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Refreshing instance network info cache due to event network-changed-3911b2de-be57-4735-af69-fac5973d8cb0. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1845.784352] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Acquiring lock "refresh_cache-bb56c470-9f85-44b1-b1ec-f44236e9de51" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.303761] env[61215]: DEBUG nova.network.neutron [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Updating instance_info_cache with network_info: [{"id": "3911b2de-be57-4735-af69-fac5973d8cb0", "address": "fa:16:3e:8a:85:bf", "network": {"id": "a4106f3a-0ac7-42c3-ae75-c701bf95b5f9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-110715612", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.249", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3911b2de-be", "ovs_interfaceid": "3911b2de-be57-4735-af69-fac5973d8cb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "54c17a7e-00f1-483c-923f-a2ae3f199d1c", "address": "fa:16:3e:1e:8f:17", "network": {"id": "22a1e126-1f23-45be-b6b9-4f79e977db8e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-64923139", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54c17a7e-00", "ovs_interfaceid": "54c17a7e-00f1-483c-923f-a2ae3f199d1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d71139e5-827d-49b3-98d2-ba6f2187dc35", "address": "fa:16:3e:8d:9d:90", "network": {"id": "a4106f3a-0ac7-42c3-ae75-c701bf95b5f9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-110715612", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd71139e5-82", "ovs_interfaceid": "d71139e5-827d-49b3-98d2-ba6f2187dc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.321516] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Releasing lock "refresh_cache-bb56c470-9f85-44b1-b1ec-f44236e9de51" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.321909] env[61215]: DEBUG nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Instance network_info: |[{"id": "3911b2de-be57-4735-af69-fac5973d8cb0", "address": "fa:16:3e:8a:85:bf", "network": {"id": "a4106f3a-0ac7-42c3-ae75-c701bf95b5f9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-110715612", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.249", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3911b2de-be", 
"ovs_interfaceid": "3911b2de-be57-4735-af69-fac5973d8cb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "54c17a7e-00f1-483c-923f-a2ae3f199d1c", "address": "fa:16:3e:1e:8f:17", "network": {"id": "22a1e126-1f23-45be-b6b9-4f79e977db8e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-64923139", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54c17a7e-00", "ovs_interfaceid": "54c17a7e-00f1-483c-923f-a2ae3f199d1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d71139e5-827d-49b3-98d2-ba6f2187dc35", "address": "fa:16:3e:8d:9d:90", "network": {"id": "a4106f3a-0ac7-42c3-ae75-c701bf95b5f9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-110715612", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd71139e5-82", "ovs_interfaceid": "d71139e5-827d-49b3-98d2-ba6f2187dc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1846.322240] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Acquired lock "refresh_cache-bb56c470-9f85-44b1-b1ec-f44236e9de51" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.322425] env[61215]: DEBUG nova.network.neutron [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Refreshing network info cache for port 3911b2de-be57-4735-af69-fac5973d8cb0 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1846.323573] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 
bb56c470-9f85-44b1-b1ec-f44236e9de51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:85:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2b931c4c-f73c-4fbd-9c9f-0270834cc69e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3911b2de-be57-4735-af69-fac5973d8cb0', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:8f:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54c17a7e-00f1-483c-923f-a2ae3f199d1c', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:9d:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2b931c4c-f73c-4fbd-9c9f-0270834cc69e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd71139e5-827d-49b3-98d2-ba6f2187dc35', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1846.334684] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Creating folder: Project (33bc963e963d493f8bad1328351cd968). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1846.342423] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf25b569-8f36-412a-abab-96120c7710d0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.354553] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Created folder: Project (33bc963e963d493f8bad1328351cd968) in parent group-v352463. [ 1846.354790] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Creating folder: Instances. Parent ref: group-v352533. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1846.355100] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4967e1d2-2b19-4d64-bd22-d81feca0b11a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.365694] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Created folder: Instances in parent group-v352533. [ 1846.365944] env[61215]: DEBUG oslo.service.loopingcall [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1846.366244] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1846.366475] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1048330-207f-407d-876c-5b2112a1053a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.391658] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1846.391658] env[61215]: value = "task-1690375" [ 1846.391658] env[61215]: _type = "Task" [ 1846.391658] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.400991] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690375, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.693911] env[61215]: DEBUG nova.network.neutron [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Updated VIF entry in instance network info cache for port 3911b2de-be57-4735-af69-fac5973d8cb0. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1846.694611] env[61215]: DEBUG nova.network.neutron [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Updating instance_info_cache with network_info: [{"id": "3911b2de-be57-4735-af69-fac5973d8cb0", "address": "fa:16:3e:8a:85:bf", "network": {"id": "a4106f3a-0ac7-42c3-ae75-c701bf95b5f9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-110715612", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.249", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3911b2de-be", "ovs_interfaceid": "3911b2de-be57-4735-af69-fac5973d8cb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "54c17a7e-00f1-483c-923f-a2ae3f199d1c", "address": "fa:16:3e:1e:8f:17", "network": {"id": "22a1e126-1f23-45be-b6b9-4f79e977db8e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-64923139", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54c17a7e-00", "ovs_interfaceid": "54c17a7e-00f1-483c-923f-a2ae3f199d1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d71139e5-827d-49b3-98d2-ba6f2187dc35", "address": "fa:16:3e:8d:9d:90", "network": {"id": "a4106f3a-0ac7-42c3-ae75-c701bf95b5f9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-110715612", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd71139e5-82", "ovs_interfaceid": "d71139e5-827d-49b3-98d2-ba6f2187dc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.707300] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Releasing lock "refresh_cache-bb56c470-9f85-44b1-b1ec-f44236e9de51" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.707548] env[61215]: DEBUG nova.compute.manager [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Received event network-vif-plugged-54c17a7e-00f1-483c-923f-a2ae3f199d1c {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1846.707746] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Acquiring lock "bb56c470-9f85-44b1-b1ec-f44236e9de51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.707957] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Lock "bb56c470-9f85-44b1-b1ec-f44236e9de51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.708139] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Lock "bb56c470-9f85-44b1-b1ec-f44236e9de51-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.708313] env[61215]: DEBUG nova.compute.manager [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] No waiting events found dispatching network-vif-plugged-54c17a7e-00f1-483c-923f-a2ae3f199d1c {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1846.708484] env[61215]: WARNING nova.compute.manager [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Received unexpected event network-vif-plugged-54c17a7e-00f1-483c-923f-a2ae3f199d1c for instance with vm_state building and task_state spawning. [ 1846.708651] env[61215]: DEBUG nova.compute.manager [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Received event network-changed-54c17a7e-00f1-483c-923f-a2ae3f199d1c {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1846.708814] env[61215]: DEBUG nova.compute.manager [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Refreshing instance network info cache due to event network-changed-54c17a7e-00f1-483c-923f-a2ae3f199d1c. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1846.709013] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Acquiring lock "refresh_cache-bb56c470-9f85-44b1-b1ec-f44236e9de51" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.709164] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Acquired lock "refresh_cache-bb56c470-9f85-44b1-b1ec-f44236e9de51" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.709324] env[61215]: DEBUG nova.network.neutron [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Refreshing network info cache for port 54c17a7e-00f1-483c-923f-a2ae3f199d1c {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1846.901506] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690375, 'name': CreateVM_Task, 'duration_secs': 0.380331} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.903855] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1846.904734] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.904912] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.905273] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1846.905823] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22ae378e-02a4-4bf6-a58e-30f82f88056c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.910475] env[61215]: DEBUG oslo_vmware.api [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Waiting for the task: (returnval){ [ 1846.910475] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]525e661f-7f4f-cc0c-c861-63e06b5d579e" [ 1846.910475] env[61215]: _type = "Task" [ 1846.910475] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.918902] env[61215]: DEBUG oslo_vmware.api [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525e661f-7f4f-cc0c-c861-63e06b5d579e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.029473] env[61215]: DEBUG nova.network.neutron [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Updated VIF entry in instance network info cache for port 54c17a7e-00f1-483c-923f-a2ae3f199d1c. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1847.029940] env[61215]: DEBUG nova.network.neutron [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Updating instance_info_cache with network_info: [{"id": "3911b2de-be57-4735-af69-fac5973d8cb0", "address": "fa:16:3e:8a:85:bf", "network": {"id": "a4106f3a-0ac7-42c3-ae75-c701bf95b5f9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-110715612", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.249", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3911b2de-be", "ovs_interfaceid": "3911b2de-be57-4735-af69-fac5973d8cb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "54c17a7e-00f1-483c-923f-a2ae3f199d1c", "address": "fa:16:3e:1e:8f:17", "network": {"id": "22a1e126-1f23-45be-b6b9-4f79e977db8e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-64923139", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54c17a7e-00", "ovs_interfaceid": "54c17a7e-00f1-483c-923f-a2ae3f199d1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d71139e5-827d-49b3-98d2-ba6f2187dc35", "address": "fa:16:3e:8d:9d:90", "network": {"id": "a4106f3a-0ac7-42c3-ae75-c701bf95b5f9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-110715612", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", 
"segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd71139e5-82", "ovs_interfaceid": "d71139e5-827d-49b3-98d2-ba6f2187dc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.040685] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Releasing lock "refresh_cache-bb56c470-9f85-44b1-b1ec-f44236e9de51" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.040932] env[61215]: DEBUG nova.compute.manager [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Received event network-vif-plugged-d71139e5-827d-49b3-98d2-ba6f2187dc35 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1847.041283] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Acquiring lock "bb56c470-9f85-44b1-b1ec-f44236e9de51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.041536] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Lock "bb56c470-9f85-44b1-b1ec-f44236e9de51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.041743] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Lock "bb56c470-9f85-44b1-b1ec-f44236e9de51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.041931] env[61215]: DEBUG nova.compute.manager [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] No waiting events found dispatching network-vif-plugged-d71139e5-827d-49b3-98d2-ba6f2187dc35 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1847.042124] env[61215]: WARNING nova.compute.manager [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Received unexpected event network-vif-plugged-d71139e5-827d-49b3-98d2-ba6f2187dc35 for instance with vm_state building and task_state spawning. 
[ 1847.042298] env[61215]: DEBUG nova.compute.manager [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Received event network-changed-d71139e5-827d-49b3-98d2-ba6f2187dc35 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1847.042460] env[61215]: DEBUG nova.compute.manager [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Refreshing instance network info cache due to event network-changed-d71139e5-827d-49b3-98d2-ba6f2187dc35. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1847.042658] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Acquiring lock "refresh_cache-bb56c470-9f85-44b1-b1ec-f44236e9de51" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1847.042800] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Acquired lock "refresh_cache-bb56c470-9f85-44b1-b1ec-f44236e9de51" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1847.042974] env[61215]: DEBUG nova.network.neutron [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Refreshing network info cache for port d71139e5-827d-49b3-98d2-ba6f2187dc35 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1847.423601] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.424131] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1847.424450] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1847.611965] env[61215]: DEBUG nova.network.neutron [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Updated VIF entry in instance network info cache for port d71139e5-827d-49b3-98d2-ba6f2187dc35. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1847.612489] env[61215]: DEBUG nova.network.neutron [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Updating instance_info_cache with network_info: [{"id": "3911b2de-be57-4735-af69-fac5973d8cb0", "address": "fa:16:3e:8a:85:bf", "network": {"id": "a4106f3a-0ac7-42c3-ae75-c701bf95b5f9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-110715612", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.249", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", "segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3911b2de-be", "ovs_interfaceid": "3911b2de-be57-4735-af69-fac5973d8cb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "54c17a7e-00f1-483c-923f-a2ae3f199d1c", "address": "fa:16:3e:1e:8f:17", "network": {"id": "22a1e126-1f23-45be-b6b9-4f79e977db8e", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-64923139", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.252", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3c5f8c7b-74a0-43f7-87b4-3c38dd26ac3e", "external-id": "nsx-vlan-transportzone-988", "segmentation_id": 988, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54c17a7e-00", "ovs_interfaceid": "54c17a7e-00f1-483c-923f-a2ae3f199d1c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "d71139e5-827d-49b3-98d2-ba6f2187dc35", "address": "fa:16:3e:8d:9d:90", "network": {"id": "a4106f3a-0ac7-42c3-ae75-c701bf95b5f9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-110715612", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.138", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b931c4c-f73c-4fbd-9c9f-0270834cc69e", "external-id": "nsx-vlan-transportzone-177", 
"segmentation_id": 177, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd71139e5-82", "ovs_interfaceid": "d71139e5-827d-49b3-98d2-ba6f2187dc35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.623361] env[61215]: DEBUG oslo_concurrency.lockutils [req-a6018053-a569-454f-b192-1b21cf346c77 req-fdab5834-01d5-4e90-8c44-40a632c94e27 service nova] Releasing lock "refresh_cache-bb56c470-9f85-44b1-b1ec-f44236e9de51" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.930987] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f393385d-e6a1-427b-8368-0a05ce352669 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "bb56c470-9f85-44b1-b1ec-f44236e9de51" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.334199] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1856.341757] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1856.342189] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1856.653809] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1856.654037] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1857.654949] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1857.654949] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1857.654949] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1857.679155] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1857.679330] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1857.679467] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1857.679596] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1857.679721] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1857.679841] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1857.679998] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1857.680076] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1857.680193] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1857.680310] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1857.680429] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1857.680963] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1858.020688] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "66420497-c0f6-4f1d-86ee-23d53400e325" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.020991] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "66420497-c0f6-4f1d-86ee-23d53400e325" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.178206] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "59d93243-c15c-4554-863b-779d94b3d858" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.178483] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "59d93243-c15c-4554-863b-779d94b3d858" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.654079] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1860.654419] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None 
None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1860.654720] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1860.665892] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.666133] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.666307] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.666465] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1860.667632] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79619c7-bcac-43ce-8b30-06bcd50dc92d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.676770] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d09d51-34b7-4298-87bd-403f46404a59 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.690566] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca2fbfd-d352-4a7e-a5c5-42531b9734c9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.696714] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3256608-00b1-4fd0-b47c-d1d29e318c9d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.727528] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181331MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1860.727677] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.727876] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.803527] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1860.803694] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d49f702b-cd29-4491-938c-0291b351ef20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1860.803827] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1860.803951] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1860.804083] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8d4665c7-67de-4ab3-a8b7-596a5e1152ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1860.804203] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1860.804321] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 2e186217-c1e1-40c6-8d84-988f35f6b93d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1860.804439] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1860.804557] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c233ab81-232d-49be-a176-bf846f0d8cc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1860.804671] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bb56c470-9f85-44b1-b1ec-f44236e9de51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1860.816085] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d0a5229f-8da2-40bb-af99-28f32923892f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.826974] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ddaa12c8-88c6-4ba0-beec-cad92acd9768 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.837194] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a17827ee-8ad2-459b-ba7d-f9f9be429e64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.847323] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 96a536ea-a1c7-470e-8873-bc1e723efefa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.856349] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 40fe7cd9-7c99-4add-a2eb-429ff2aba7a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.865278] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e9369a71-fc94-4cdd-82c6-6308783581c1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.873981] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 38fe96cf-e570-4c0e-af6f-2f199efc06ff has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.882571] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f91efd4b-851e-44bc-9cf2-7be8a2d2d7df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.891276] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0576d0b5-3890-4e1d-b208-40d46c2fdae7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.899947] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bf807d62-c8be-4819-9fc1-4b2d6d14cc39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.908919] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 12825ddf-86ee-4500-b43b-cf480dc54f3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.918245] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a42577f4-29ba-446b-a561-745ff14d1696 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.927887] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e4cf3e92-a1a6-47ac-8625-37cdbf96cb35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.937833] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0d609df2-621c-456f-b8ce-a209e9052153 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.948294] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 66420497-c0f6-4f1d-86ee-23d53400e325 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.958481] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 59d93243-c15c-4554-863b-779d94b3d858 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
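The audit then folds the tracked allocations into the host-level "Final resource view" that follows, and that record can be reproduced from the ten instances above (each DISK_GB: 1, MEMORY_MB: 128, VCPU: 1) plus the 512 MB the inventory reserves for the host. A worked check of the arithmetic:

    instances = 10                 # rows tracked above, one allocation each
    reserved_ram_mb = 512          # MEMORY_MB 'reserved' in the inventory below
    used_ram_mb = reserved_ram_mb + instances * 128   # 512 + 1280 = 1792 MB
    used_disk_gb = instances * 1                      # 10 GB
    used_vcpus = instances * 1                        # 10 of 48 physical cores
    schedulable_vcpus = 48 * 4.0                      # allocation_ratio 4.0 -> 192
    print(used_ram_mb, used_disk_gb, used_vcpus, schedulable_vcpus)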
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1860.958816] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1860.958987] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1861.250035] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d38ac7b-43b9-4128-b0e2-966ec11fd532 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.257165] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8c6102-9c36-49a4-851d-cfdd734939c0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.287733] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4604c291-fd26-4782-99a7-0e8f2ee479b4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.295502] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11143aa-a149-497e-b9b9-5a2e77d7ce11 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.309935] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1861.318722] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1861.335611] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1861.335812] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.608s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.113161] env[61215]: DEBUG oslo_concurrency.lockutils [None 
req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "f3a3a510-a085-4388-b49d-b4371095b436" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.113574] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "f3a3a510-a085-4388-b49d-b4371095b436" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.012331] env[61215]: DEBUG oslo_concurrency.lockutils [None req-45e84f55-b0c5-41f6-b2b3-1bbf70593657 tempest-ServerActionsTestOtherA-1801254831 tempest-ServerActionsTestOtherA-1801254831-project-member] Acquiring lock "ea0fe7f8-9070-4b17-bc36-d65c62a18923" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.012331] env[61215]: DEBUG oslo_concurrency.lockutils [None req-45e84f55-b0c5-41f6-b2b3-1bbf70593657 tempest-ServerActionsTestOtherA-1801254831 tempest-ServerActionsTestOtherA-1801254831-project-member] Lock "ea0fe7f8-9070-4b17-bc36-d65c62a18923" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.240969] env[61215]: WARNING oslo_vmware.rw_handles [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1889.240969] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1889.240969] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1889.240969] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1889.240969] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1889.240969] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 1889.240969] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1889.240969] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1889.240969] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1889.240969] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1889.240969] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1889.240969] env[61215]: ERROR oslo_vmware.rw_handles [ 1889.241710] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] 
[instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/e9eaf53d-7a4b-4342-9316-f63d14708923/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1889.243568] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1889.243826] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Copying Virtual Disk [datastore1] vmware_temp/e9eaf53d-7a4b-4342-9316-f63d14708923/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/e9eaf53d-7a4b-4342-9316-f63d14708923/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1889.244161] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4927df96-d427-4fb7-8ef0-76948e7f8dbd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.254387] env[61215]: DEBUG oslo_vmware.api [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Waiting for the task: (returnval){ [ 1889.254387] env[61215]: value = "task-1690376" [ 1889.254387] env[61215]: _type = "Task" [ 1889.254387] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.262279] env[61215]: DEBUG oslo_vmware.api [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Task: {'id': task-1690376, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.766011] env[61215]: DEBUG oslo_vmware.exceptions [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Fault InvalidArgument not matched. 
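The CopyVirtualDisk_Task above is the cache-priming step: the sparse vmdk just downloaded is copied to the image cache path, and the request blocks on the vCenter task until it fails. "Fault InvalidArgument not matched." means oslo.vmware could not map the fault to a narrower exception class, so it surfaces as a generic VimFaultException in the traceback that follows. A minimal sketch of the polling pattern, assuming an established oslo.vmware session and pre-built vim/disk_manager/path arguments (hypothetical names):

    from oslo_vmware import exceptions as vexc

    def copy_virtual_disk(session, vim, disk_manager, src, dst, datacenter):
        # invoke_api returns a vCenter task reference; wait_for_task polls
        # it (the "progress is 0%" records above) and raises a translated
        # exception when the task ends in error.
        task = session.invoke_api(vim, 'CopyVirtualDisk_Task', disk_manager,
                                  sourceName=src, sourceDatacenter=datacenter,
                                  destName=dst, destDatacenter=datacenter)
        try:
            return session.wait_for_task(task)
        except vexc.VimFaultException as err:
            # err.fault_list carries the raw fault names,
            # e.g. ['InvalidArgument'].
            raise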
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1889.766312] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1889.766887] env[61215]: ERROR nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1889.766887] env[61215]: Faults: ['InvalidArgument'] [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Traceback (most recent call last): [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] yield resources [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] self.driver.spawn(context, instance, image_meta, [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] self._fetch_image_if_missing(context, vi) [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] image_cache(vi, tmp_image_ds_loc) [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] vm_util.copy_virtual_disk( [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] session._wait_for_task(vmdk_copy_task) [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] return self.wait_for_task(task_ref) [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] return evt.wait() [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] result = hub.switch() [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] return self.greenlet.switch() [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] self.f(*self.args, **self.kw) [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] raise exceptions.translate_fault(task_info.error) [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Faults: ['InvalidArgument'] [ 1889.766887] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] [ 1889.768019] env[61215]: INFO nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Terminating instance [ 1889.768839] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1889.769081] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1889.769323] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-e1a93a13-be8b-4a99-aa06-35caed5c1a18 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.771479] env[61215]: DEBUG nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1889.771671] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1889.772399] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d795c050-a838-4c0f-be90-c2501b5aed2a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.779368] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1889.779572] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1963b10e-b7c9-440e-a24a-d020f1c75624 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.781641] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1889.781817] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1889.782758] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bb2d723-1f97-41aa-a1df-93cc536e878c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.787780] env[61215]: DEBUG oslo_vmware.api [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Waiting for the task: (returnval){ [ 1889.787780] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ac5122-6c2b-39cc-3a13-27c234d2c52c" [ 1889.787780] env[61215]: _type = "Task" [ 1889.787780] env[61215]: } to complete. 
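Two requests interleave here: req-9becddae tears down the failed instance (UnregisterVM, then deleting its datastore files), while req-85570b98 prepares the image cache, creating devstack-image-cache_base and issuing a SearchDatastore_Task to check whether the cached vmdk already exists. Directory creation is deliberately tolerant of losing a race; a sketch of that create-if-missing idiom, assuming an oslo.vmware session and file_manager reference (hypothetical names):

    from oslo_vmware import exceptions as vexc

    def mkdir_idempotent(session, vim, file_manager, ds_path, datacenter):
        # MakeDirectory with createParentDirectories mirrors the records
        # above; if another request already created the path, the
        # directory is usable either way.
        try:
            session.invoke_api(vim, 'MakeDirectory', file_manager,
                               name=ds_path, datacenter=datacenter,
                               createParentDirectories=True)
        except vexc.FileAlreadyExistsException:
            pass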
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.798758] env[61215]: DEBUG oslo_vmware.api [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ac5122-6c2b-39cc-3a13-27c234d2c52c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.871622] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1889.871835] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1889.872050] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Deleting the datastore file [datastore1] 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1889.872295] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d88e5a9d-31bf-41b5-b689-c0454dd54f22 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.878650] env[61215]: DEBUG oslo_vmware.api [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Waiting for the task: (returnval){ [ 1889.878650] env[61215]: value = "task-1690378" [ 1889.878650] env[61215]: _type = "Task" [ 1889.878650] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.886616] env[61215]: DEBUG oslo_vmware.api [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Task: {'id': task-1690378, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.298695] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1890.299135] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Creating directory with path [datastore1] vmware_temp/f530f583-c92a-4f9a-85d8-9151df27df7d/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1890.299378] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d324daa-7f4a-4bed-b1e4-5f6328bad62b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.310806] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Created directory with path [datastore1] vmware_temp/f530f583-c92a-4f9a-85d8-9151df27df7d/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1890.311072] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Fetch image to [datastore1] vmware_temp/f530f583-c92a-4f9a-85d8-9151df27df7d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1890.311283] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/f530f583-c92a-4f9a-85d8-9151df27df7d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1890.312068] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbcb9e8-8e47-43bd-bf4e-1de391813fc1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.318991] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f666c6e-e565-49cc-a252-e2c042020a7f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.327741] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9522c4f-6258-4a63-9735-d9cc404f0f23 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.357530] env[61215]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad3e5e8-e804-4146-83e5-8345d5c84253 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.363010] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0e0899cd-1c68-44bd-9ecb-d41b0c9201cc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.389160] env[61215]: DEBUG oslo_vmware.api [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Task: {'id': task-1690378, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062039} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.390447] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1890.390651] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1890.390823] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1890.390999] env[61215]: INFO nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Took 0.62 seconds to destroy the instance on the hypervisor. 
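That closes the hypervisor-side teardown for 44c01d19: spawn failed, so the compute manager unregistered the VM and deleted its datastore directory, and the "Aborting claim" records that follow return the instance's reserved resources so the tracker and placement stop counting it. The enclosing pattern is roughly a claim wrapped around spawn (a sketch of the shape, not Nova's literal code):

    def build_with_claim(claim, driver, context, instance, image_meta):
        try:
            driver.spawn(context, instance, image_meta)
        except Exception:
            # Give the CPU/RAM/disk claimed for this instance back to the
            # resource tracker, then let the build failure propagate so the
            # instance can be set to an error state.
            claim.abort()
            raise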
[ 1890.392832] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1890.394795] env[61215]: DEBUG nova.compute.claims [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1890.394976] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.395206] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.549722] env[61215]: DEBUG oslo_vmware.rw_handles [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f530f583-c92a-4f9a-85d8-9151df27df7d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1890.610872] env[61215]: DEBUG oslo_vmware.rw_handles [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1890.611123] env[61215]: DEBUG oslo_vmware.rw_handles [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f530f583-c92a-4f9a-85d8-9151df27df7d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
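The upload on the other request succeeds: rw_handles opens an HTTPS write connection straight to the ESX host's /folder endpoint and streams the 21,318,656-byte sparse vmdk. The WARNING back at 1889.24 is this same handle's close() path, where the remote end may drop the connection before sending a final response. A sketch of the write-handle lifecycle, assuming oslo.vmware's FileWriteHandle with illustrative arguments (it will only actually connect inside this environment):

    from oslo_vmware import rw_handles

    cookies = []          # illustrative; real cookies come from the vSphere session
    image_chunks = [b'']  # illustrative; normally an iterator over the Glance image

    handle = rw_handles.FileWriteHandle(
        'esx7c1n3.openstack.eu-de-1.cloud.sap', 443, 'ha-datacenter',
        'datastore1', cookies,
        'vmware_temp/f530f583-c92a-4f9a-85d8-9151df27df7d/'
        'e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk',
        21318656)
    for chunk in image_chunks:
        handle.write(chunk)
    handle.close()  # the earlier WARNING is this step: the host may close
                    # the socket without returning a response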
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1892.972753] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.973175] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.045984] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0352cc6-7240-4780-8305-7f245512cc21 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.054227] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7b25d9-1ece-4215-b2d0-7bff9aba459a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.084547] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c569c25e-57fa-4731-9cbc-17fce0c46283 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.092455] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e342cc7d-986a-48b1-b6ae-2dd69a4879c9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.106862] env[61215]: DEBUG nova.compute.provider_tree [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1893.119091] env[61215]: DEBUG nova.scheduler.client.report [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1893.136145] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.741s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.136771] env[61215]: ERROR nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1893.136771] env[61215]: Faults: ['InvalidArgument'] [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Traceback (most recent call last): [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] self.driver.spawn(context, instance, image_meta, [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] self._fetch_image_if_missing(context, vi) [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] image_cache(vi, tmp_image_ds_loc) [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] vm_util.copy_virtual_disk( [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] session._wait_for_task(vmdk_copy_task) [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] return self.wait_for_task(task_ref) [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] return evt.wait() [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] result = hub.switch() [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] return self.greenlet.switch() [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] self.f(*self.args, **self.kw) [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] raise exceptions.translate_fault(task_info.error) [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Faults: ['InvalidArgument'] [ 1893.136771] env[61215]: ERROR nova.compute.manager [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] [ 1893.138336] env[61215]: DEBUG nova.compute.utils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1893.139179] env[61215]: DEBUG nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Build of instance 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 was re-scheduled: A specified parameter was not correct: fileType [ 1893.139179] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1893.139624] env[61215]: DEBUG nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1893.139876] env[61215]: DEBUG nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1893.139975] env[61215]: DEBUG nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1893.140161] env[61215]: DEBUG nova.network.neutron [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1893.444835] env[61215]: DEBUG oslo_concurrency.lockutils [None req-eb8d599d-1968-4e6b-a515-d88704d582da tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "be5cf1b4-da97-4944-bb38-f10943576b8f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.444835] env[61215]: DEBUG oslo_concurrency.lockutils [None req-eb8d599d-1968-4e6b-a515-d88704d582da tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "be5cf1b4-da97-4944-bb38-f10943576b8f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.701302] env[61215]: DEBUG nova.network.neutron [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.722478] env[61215]: INFO nova.compute.manager [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Took 0.58 seconds to deallocate network for instance. 
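The traceback above is the standard oslo.vmware failure path: the CopyVirtualDisk_Task fails on the vCenter side, _poll_task (api.py:448) translates the task error into a VimFaultException, and Nova reacts by aborting the resource claim, deallocating networking and rescheduling the build. A sketch of what the caller sees, with session, copy_task and the reschedule hook as placeholders:

```python
# Hedged sketch of the failure path in the traceback above: a vSphere
# task that ends in error is raised to the caller as VimFaultException,
# whose fault_list names the faults (here ['InvalidArgument']).
from oslo_vmware import exceptions as vexc

def wait_or_reschedule(session, copy_task, reschedule_build):
    """Wait on a vSphere task; on fault, fall back to a reschedule hook.

    session and copy_task are placeholders (an oslo.vmware session and
    e.g. a CopyVirtualDisk_Task reference); reschedule_build stands in
    for Nova's claim-abort/deallocate/reschedule handling logged above.
    """
    try:
        session.wait_for_task(copy_task)
    except vexc.VimFaultException as exc:
        # fault_list carries the vSphere fault names from the task error.
        if 'InvalidArgument' in exc.fault_list:
            reschedule_build()
        else:
            raise
```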
[ 1893.865645] env[61215]: INFO nova.scheduler.client.report [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Deleted allocations for instance 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 [ 1893.892018] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9becddae-2821-4b32-927d-97663b32664b tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 612.319s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.892018] env[61215]: DEBUG oslo_concurrency.lockutils [None req-16dc8d6f-eb38-4ea3-855e-f0ad85dfaf08 tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 412.977s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.892018] env[61215]: DEBUG oslo_concurrency.lockutils [None req-16dc8d6f-eb38-4ea3-855e-f0ad85dfaf08 tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Acquiring lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.892018] env[61215]: DEBUG oslo_concurrency.lockutils [None req-16dc8d6f-eb38-4ea3-855e-f0ad85dfaf08 tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.892018] env[61215]: DEBUG oslo_concurrency.lockutils [None req-16dc8d6f-eb38-4ea3-855e-f0ad85dfaf08 tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.894789] env[61215]: INFO nova.compute.manager [None req-16dc8d6f-eb38-4ea3-855e-f0ad85dfaf08 tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Terminating instance [ 1893.897069] env[61215]: DEBUG nova.compute.manager [None req-16dc8d6f-eb38-4ea3-855e-f0ad85dfaf08 tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1893.897404] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-16dc8d6f-eb38-4ea3-855e-f0ad85dfaf08 tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1893.898010] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0cb09cc-6af1-45bf-b736-fa4b859e2c99 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.903936] env[61215]: DEBUG nova.compute.manager [None req-18546486-f39c-4bd7-b349-94556da42d67 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: d0a5229f-8da2-40bb-af99-28f32923892f] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1893.916113] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb66f3b-2a1c-4267-b568-5a507c01d2c9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.946022] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-16dc8d6f-eb38-4ea3-855e-f0ad85dfaf08 tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9 could not be found. [ 1893.946022] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-16dc8d6f-eb38-4ea3-855e-f0ad85dfaf08 tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1893.946022] env[61215]: INFO nova.compute.manager [None req-16dc8d6f-eb38-4ea3-855e-f0ad85dfaf08 tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1893.946022] env[61215]: DEBUG oslo.service.loopingcall [None req-16dc8d6f-eb38-4ea3-855e-f0ad85dfaf08 tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1893.946022] env[61215]: DEBUG nova.compute.manager [None req-18546486-f39c-4bd7-b349-94556da42d67 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] [instance: d0a5229f-8da2-40bb-af99-28f32923892f] Instance disappeared before build. 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1893.946562] env[61215]: DEBUG nova.compute.manager [-] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1893.946866] env[61215]: DEBUG nova.network.neutron [-] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1893.972017] env[61215]: DEBUG oslo_concurrency.lockutils [None req-18546486-f39c-4bd7-b349-94556da42d67 tempest-VolumesAdminNegativeTest-835700900 tempest-VolumesAdminNegativeTest-835700900-project-member] Lock "d0a5229f-8da2-40bb-af99-28f32923892f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.494s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.973984] env[61215]: DEBUG nova.network.neutron [-] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.986032] env[61215]: DEBUG nova.compute.manager [None req-ed62679c-a7c6-4d81-8ec7-280920a68661 tempest-ServerActionsTestJSON-690131439 tempest-ServerActionsTestJSON-690131439-project-member] [instance: ddaa12c8-88c6-4ba0-beec-cad92acd9768] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1893.987082] env[61215]: INFO nova.compute.manager [-] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] Took 0.04 seconds to deallocate network for instance. [ 1894.019233] env[61215]: DEBUG nova.compute.manager [None req-ed62679c-a7c6-4d81-8ec7-280920a68661 tempest-ServerActionsTestJSON-690131439 tempest-ServerActionsTestJSON-690131439-project-member] [instance: ddaa12c8-88c6-4ba0-beec-cad92acd9768] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1894.047749] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ed62679c-a7c6-4d81-8ec7-280920a68661 tempest-ServerActionsTestJSON-690131439 tempest-ServerActionsTestJSON-690131439-project-member] Lock "ddaa12c8-88c6-4ba0-beec-cad92acd9768" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.319s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.056991] env[61215]: DEBUG nova.compute.manager [None req-b25645e4-d0d2-4489-9385-fab3f982d51b tempest-ServerRescueTestJSON-1345079088 tempest-ServerRescueTestJSON-1345079088-project-member] [instance: a17827ee-8ad2-459b-ba7d-f9f9be429e64] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1894.095869] env[61215]: DEBUG nova.compute.manager [None req-b25645e4-d0d2-4489-9385-fab3f982d51b tempest-ServerRescueTestJSON-1345079088 tempest-ServerRescueTestJSON-1345079088-project-member] [instance: a17827ee-8ad2-459b-ba7d-f9f9be429e64] Instance disappeared before build. 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1894.113652] env[61215]: DEBUG oslo_concurrency.lockutils [None req-16dc8d6f-eb38-4ea3-855e-f0ad85dfaf08 tempest-VolumesAssistedSnapshotsTest-1691935299 tempest-VolumesAssistedSnapshotsTest-1691935299-project-member] Lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.222s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.114565] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 61.148s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.114755] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 44c01d19-58c0-43e6-ab53-d2a6e1edd3b9] During sync_power_state the instance has a pending task (deleting). Skip. [ 1894.114963] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "44c01d19-58c0-43e6-ab53-d2a6e1edd3b9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.123292] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b25645e4-d0d2-4489-9385-fab3f982d51b tempest-ServerRescueTestJSON-1345079088 tempest-ServerRescueTestJSON-1345079088-project-member] Lock "a17827ee-8ad2-459b-ba7d-f9f9be429e64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.577s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.132234] env[61215]: DEBUG nova.compute.manager [None req-941ddaa9-12ca-4945-bfe6-ebc4cd0cab74 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 96a536ea-a1c7-470e-8873-bc1e723efefa] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1894.155504] env[61215]: DEBUG nova.compute.manager [None req-941ddaa9-12ca-4945-bfe6-ebc4cd0cab74 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 96a536ea-a1c7-470e-8873-bc1e723efefa] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1894.177241] env[61215]: DEBUG oslo_concurrency.lockutils [None req-941ddaa9-12ca-4945-bfe6-ebc4cd0cab74 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "96a536ea-a1c7-470e-8873-bc1e723efefa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.262s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.187428] env[61215]: DEBUG nova.compute.manager [None req-353adf5e-a541-4e36-8412-52317d627ef9 tempest-AttachVolumeShelveTestJSON-1147171237 tempest-AttachVolumeShelveTestJSON-1147171237-project-member] [instance: 40fe7cd9-7c99-4add-a2eb-429ff2aba7a2] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1894.215567] env[61215]: DEBUG nova.compute.manager [None req-353adf5e-a541-4e36-8412-52317d627ef9 tempest-AttachVolumeShelveTestJSON-1147171237 tempest-AttachVolumeShelveTestJSON-1147171237-project-member] [instance: 40fe7cd9-7c99-4add-a2eb-429ff2aba7a2] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1894.239319] env[61215]: DEBUG oslo_concurrency.lockutils [None req-353adf5e-a541-4e36-8412-52317d627ef9 tempest-AttachVolumeShelveTestJSON-1147171237 tempest-AttachVolumeShelveTestJSON-1147171237-project-member] Lock "40fe7cd9-7c99-4add-a2eb-429ff2aba7a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.565s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.250034] env[61215]: DEBUG nova.compute.manager [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1894.308802] env[61215]: DEBUG oslo_concurrency.lockutils [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.309069] env[61215]: DEBUG oslo_concurrency.lockutils [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.310631] env[61215]: INFO nova.compute.claims [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1894.694673] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Acquiring lock "e9369a71-fc94-4cdd-82c6-6308783581c1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.765258] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3b692c-8df0-456b-bca7-e6e1996d5e76 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.774536] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa893ce-ac96-488b-ad6a-27350f466a9b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.804965] env[61215]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d516b4d-d317-451c-a94f-5b95716930b4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.812937] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b269b836-f243-4c3c-93c6-5837f843cdf5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.827655] env[61215]: DEBUG nova.compute.provider_tree [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1894.835901] env[61215]: DEBUG nova.scheduler.client.report [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1894.850959] env[61215]: DEBUG oslo_concurrency.lockutils [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.542s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.851649] env[61215]: DEBUG nova.compute.manager [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1894.891101] env[61215]: DEBUG nova.compute.claims [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1894.891331] env[61215]: DEBUG oslo_concurrency.lockutils [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.891709] env[61215]: DEBUG oslo_concurrency.lockutils [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.205510] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08b069e-bbff-4cc1-82c0-f82b1d7cfc16 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.213249] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f206f7ce-88df-45eb-a756-341dc8b68988 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.245253] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5508ae55-29a8-4348-8fdc-6287b24272ac {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.253227] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225f1498-04c0-4482-a9c5-267182e8e0d8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.268186] env[61215]: DEBUG nova.compute.provider_tree [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1895.277295] env[61215]: DEBUG nova.scheduler.client.report [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1895.290640] env[61215]: DEBUG 
oslo_concurrency.lockutils [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.399s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.291445] env[61215]: DEBUG nova.compute.utils [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Conflict updating instance e9369a71-fc94-4cdd-82c6-6308783581c1. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1895.292948] env[61215]: DEBUG nova.compute.manager [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Instance disappeared during build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2505}} [ 1895.293133] env[61215]: DEBUG nova.compute.manager [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1895.293523] env[61215]: DEBUG oslo_concurrency.lockutils [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Acquiring lock "refresh_cache-e9369a71-fc94-4cdd-82c6-6308783581c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.293612] env[61215]: DEBUG oslo_concurrency.lockutils [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Acquired lock "refresh_cache-e9369a71-fc94-4cdd-82c6-6308783581c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.293784] env[61215]: DEBUG nova.network.neutron [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1895.350515] env[61215]: DEBUG nova.network.neutron [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1895.445121] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8c624a34-09da-48ba-8dda-5403d0c5cee9 tempest-AttachVolumeShelveTestJSON-1147171237 tempest-AttachVolumeShelveTestJSON-1147171237-project-member] Acquiring lock "3a7e93d3-cef2-4b3f-a20b-da373780e1c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.445360] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8c624a34-09da-48ba-8dda-5403d0c5cee9 tempest-AttachVolumeShelveTestJSON-1147171237 tempest-AttachVolumeShelveTestJSON-1147171237-project-member] Lock "3a7e93d3-cef2-4b3f-a20b-da373780e1c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.701473] env[61215]: DEBUG nova.network.neutron [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1895.712870] env[61215]: DEBUG oslo_concurrency.lockutils [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Releasing lock "refresh_cache-e9369a71-fc94-4cdd-82c6-6308783581c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.713127] env[61215]: DEBUG nova.compute.manager [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1895.713322] env[61215]: DEBUG nova.compute.manager [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1895.713503] env[61215]: DEBUG nova.network.neutron [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1895.731874] env[61215]: DEBUG nova.network.neutron [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1895.739976] env[61215]: DEBUG nova.network.neutron [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1895.748208] env[61215]: INFO nova.compute.manager [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Took 0.03 seconds to deallocate network for instance. [ 1895.822046] env[61215]: INFO nova.scheduler.client.report [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Deleted allocations for instance e9369a71-fc94-4cdd-82c6-6308783581c1 [ 1895.822381] env[61215]: DEBUG oslo_concurrency.lockutils [None req-62d3796d-b92d-452e-891d-873362c48468 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "e9369a71-fc94-4cdd-82c6-6308783581c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.478s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.823691] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "e9369a71-fc94-4cdd-82c6-6308783581c1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.129s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.823784] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Acquiring lock "e9369a71-fc94-4cdd-82c6-6308783581c1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.823976] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "e9369a71-fc94-4cdd-82c6-6308783581c1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.824268] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "e9369a71-fc94-4cdd-82c6-6308783581c1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.825992] env[61215]: INFO nova.compute.manager [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 
tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Terminating instance [ 1895.827681] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Acquiring lock "refresh_cache-e9369a71-fc94-4cdd-82c6-6308783581c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.828642] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Acquired lock "refresh_cache-e9369a71-fc94-4cdd-82c6-6308783581c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.828642] env[61215]: DEBUG nova.network.neutron [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1895.832461] env[61215]: DEBUG nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1895.858374] env[61215]: DEBUG nova.network.neutron [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1895.881931] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.882234] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.883793] env[61215]: INFO nova.compute.claims [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1895.987478] env[61215]: DEBUG nova.network.neutron [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1895.998470] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Releasing lock "refresh_cache-e9369a71-fc94-4cdd-82c6-6308783581c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.998923] env[61215]: DEBUG nova.compute.manager [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1895.999143] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1896.001663] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-193be7ed-7c17-4d92-a166-2624abde7274 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.010787] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c95f92-a72f-4e54-ae05-905e9f1b5c58 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.042232] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e9369a71-fc94-4cdd-82c6-6308783581c1 could not be found. [ 1896.042436] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1896.042604] env[61215]: INFO nova.compute.manager [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1896.042843] env[61215]: DEBUG oslo.service.loopingcall [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1896.043088] env[61215]: DEBUG nova.compute.manager [-] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1896.043191] env[61215]: DEBUG nova.network.neutron [-] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1896.062490] env[61215]: DEBUG nova.network.neutron [-] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1896.069593] env[61215]: DEBUG nova.network.neutron [-] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.077557] env[61215]: INFO nova.compute.manager [-] [instance: e9369a71-fc94-4cdd-82c6-6308783581c1] Took 0.03 seconds to deallocate network for instance. 
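The terminate/deallocate sequence above is bracketed by the oslo.concurrency lock lines that dominate this log: lockutils.py:402/407/421 come from the synchronized-decorator wrapper (with its waited/held timings), while :310/313/331 come from the lock context manager used for the "refresh_cache-<uuid>" locks. A sketch of both forms, with placeholder bodies; lock names mirror the log:

```python
# Hedged sketch of the two oslo.concurrency locking forms behind the
# "Acquiring lock" / "acquired ... waited" / "released ... held" DEBUG
# lines in this section. The bodies are placeholders.
from oslo_concurrency import lockutils

# Context-manager form (lockutils.py:310/313/331), as used for the
# "refresh_cache-<uuid>" locks around network-info cache updates:
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance_info_cache while the lock is held

# Decorator form (lockutils.py:402/407/421); the waited/held durations
# in the log are measured around the wrapped call:
@lockutils.synchronized('compute_resources')
def abort_instance_claim():
    pass  # resource-tracker work runs with "compute_resources" held
```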
[ 1896.159875] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1873d84c-f8fe-4207-9fe3-fd6601a7d8a8 tempest-AttachVolumeNegativeTest-1117439275 tempest-AttachVolumeNegativeTest-1117439275-project-member] Lock "e9369a71-fc94-4cdd-82c6-6308783581c1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.336s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.213730] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca59faa-412a-42e2-ad14-ceaab57bd7e4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.221433] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e999008a-2603-43a7-b30c-5f02ad59e334 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.250050] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef368c9-9a94-4b86-ba72-614250c32b34 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.256843] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a53f736-d482-4235-bb40-da6f44102482 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.269611] env[61215]: DEBUG nova.compute.provider_tree [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1896.278087] env[61215]: DEBUG nova.scheduler.client.report [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1896.291693] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.409s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.292335] env[61215]: DEBUG nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Start building networks asynchronously for instance. 
[ 1896.292335] env[61215]: DEBUG nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}}
[ 1897.548900] env[61215]: DEBUG nova.compute.utils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 1897.551705] env[61215]: DEBUG nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}}
[ 1897.551890] env[61215]: DEBUG nova.network.neutron [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 1897.567461] env[61215]: DEBUG nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}}
[ 1897.626941] env[61215]: DEBUG nova.policy [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a388085d2fd54b63a03890b752607483', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a636e6a12bf245c9993b281dd617e167', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}}
[ 1897.647813] env[61215]: DEBUG nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}}
[ 1897.681062] env[61215]: DEBUG nova.virt.hardware [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=<?>,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-01T03:04:46Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 1897.681399] env[61215]: DEBUG nova.virt.hardware [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 1897.681657] env[61215]: DEBUG nova.virt.hardware [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 1897.681735] env[61215]: DEBUG nova.virt.hardware [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 1897.681887] env[61215]: DEBUG nova.virt.hardware [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 1897.682044] env[61215]: DEBUG nova.virt.hardware [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 1897.682341] env[61215]: DEBUG nova.virt.hardware [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 1897.682552] env[61215]: DEBUG nova.virt.hardware [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 1897.682675] env[61215]: DEBUG nova.virt.hardware [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 1897.682870] env[61215]: DEBUG nova.virt.hardware [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 1897.683010] env[61215]: DEBUG nova.virt.hardware [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 1897.684243] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd7eecf-4fb8-46f3-88ad-a01e24d9be9c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1897.692090] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2692bf16-9f79-484d-aef9-76eb6f58d2b1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1898.025211] env[61215]: DEBUG nova.network.neutron [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Successfully created port: f83cf4fa-4778-4beb-a82c-a718eceac784 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 1898.317328] env[61215]: DEBUG oslo_concurrency.lockutils [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquiring lock "38fe96cf-e570-4c0e-af6f-2f199efc06ff" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1898.978612] env[61215]: DEBUG nova.network.neutron [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Successfully updated port: f83cf4fa-4778-4beb-a82c-a718eceac784 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 1898.996820] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquiring lock "refresh_cache-38fe96cf-e570-4c0e-af6f-2f199efc06ff" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1898.997139] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquired lock "refresh_cache-38fe96cf-e570-4c0e-af6f-2f199efc06ff" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1898.997139] env[61215]: DEBUG nova.network.neutron [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 1899.065788] env[61215]: DEBUG nova.network.neutron [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 1899.093214] env[61215]: DEBUG nova.compute.manager [req-00099c78-a02c-4671-852c-35ff5a4115bd req-f8369239-acf9-463e-887b-e334cf407ff1 service nova] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Received event network-vif-plugged-f83cf4fa-4778-4beb-a82c-a718eceac784 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}}
[ 1899.093214] env[61215]: DEBUG oslo_concurrency.lockutils [req-00099c78-a02c-4671-852c-35ff5a4115bd req-f8369239-acf9-463e-887b-e334cf407ff1 service nova] Acquiring lock "38fe96cf-e570-4c0e-af6f-2f199efc06ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1899.093214] env[61215]: DEBUG oslo_concurrency.lockutils [req-00099c78-a02c-4671-852c-35ff5a4115bd req-f8369239-acf9-463e-887b-e334cf407ff1 service nova] Lock "38fe96cf-e570-4c0e-af6f-2f199efc06ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1899.093483] env[61215]: DEBUG oslo_concurrency.lockutils [req-00099c78-a02c-4671-852c-35ff5a4115bd req-f8369239-acf9-463e-887b-e334cf407ff1 service nova] Lock "38fe96cf-e570-4c0e-af6f-2f199efc06ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1899.093546] env[61215]: DEBUG nova.compute.manager [req-00099c78-a02c-4671-852c-35ff5a4115bd req-f8369239-acf9-463e-887b-e334cf407ff1 service nova] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] No waiting events found dispatching network-vif-plugged-f83cf4fa-4778-4beb-a82c-a718eceac784 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
[ 1899.093681] env[61215]: WARNING nova.compute.manager [req-00099c78-a02c-4671-852c-35ff5a4115bd req-f8369239-acf9-463e-887b-e334cf407ff1 service nova] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Received unexpected event network-vif-plugged-f83cf4fa-4778-4beb-a82c-a718eceac784 for instance with vm_state building and task_state deleting.
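The event records above show Nova's external-event handling: Neutron reports network-vif-plugged, the compute manager pops a registered waiter for that (instance, event) pair under the per-instance "-events" lock, and, because this instance is already being deleted, no waiter exists and the event is only WARNING-logged. A minimal sketch of that pop-or-warn pattern, assuming only the standard library (class and method names are illustrative, not Nova's implementation):

import threading

class InstanceEvents:
    """Pop-or-warn dispatch for externally triggered instance events."""

    def __init__(self):
        self._lock = threading.Lock()   # stands in for the per-instance "-events" lock
        self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        # Called before the action that will trigger the event (e.g. plugging a VIF).
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        # Return the registered waiter, or None if nobody is waiting.
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

    def dispatch(self, instance_uuid, event_name):
        waiter = self.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # No one is waiting (e.g. the instance is already being deleted),
            # mirroring the WARNING record above.
            print("WARNING: received unexpected event %s for instance %s"
                  % (event_name, instance_uuid))
        else:
            waiter.set()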
[ 1899.297635] env[61215]: DEBUG nova.network.neutron [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Updating instance_info_cache with network_info: [{"id": "f83cf4fa-4778-4beb-a82c-a718eceac784", "address": "fa:16:3e:f1:74:95", "network": {"id": "2e1d3248-258a-4972-9b79-db19a1c10825", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1547109818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a636e6a12bf245c9993b281dd617e167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37333dc2-982e-45e9-9dda-0c18417d7fa6", "external-id": "nsx-vlan-transportzone-227", "segmentation_id": 227, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf83cf4fa-47", "ovs_interfaceid": "f83cf4fa-4778-4beb-a82c-a718eceac784", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1899.314138] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Releasing lock "refresh_cache-38fe96cf-e570-4c0e-af6f-2f199efc06ff" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1899.314448] env[61215]: DEBUG nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Instance network_info: |[{"id": "f83cf4fa-4778-4beb-a82c-a718eceac784", "address": "fa:16:3e:f1:74:95", "network": {"id": "2e1d3248-258a-4972-9b79-db19a1c10825", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1547109818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a636e6a12bf245c9993b281dd617e167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37333dc2-982e-45e9-9dda-0c18417d7fa6", "external-id": "nsx-vlan-transportzone-227", "segmentation_id": 227, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf83cf4fa-47", "ovs_interfaceid": "f83cf4fa-4778-4beb-a82c-a718eceac784", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}}
[ 1899.314863] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:74:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37333dc2-982e-45e9-9dda-0c18417d7fa6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f83cf4fa-4778-4beb-a82c-a718eceac784', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1899.322464] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Creating folder: Project (a636e6a12bf245c9993b281dd617e167). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1899.323070] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52540c66-7b30-42e8-aff1-cd64b799b267 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1899.337391] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Created folder: Project (a636e6a12bf245c9993b281dd617e167) in parent group-v352463.
[ 1899.337391] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Creating folder: Instances. Parent ref: group-v352536. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1899.337631] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e170853-5aad-445d-8518-79ca200f4825 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1899.347869] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Created folder: Instances in parent group-v352536.
[ 1899.348119] env[61215]: DEBUG oslo.service.loopingcall [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 1899.348309] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1899.348510] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50d31760-4af8-4df0-af73-edee7bb7a5a7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1899.367831] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1899.367831] env[61215]: value = "task-1690381"
[ 1899.367831] env[61215]: _type = "Task"
[ 1899.367831] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1899.376749] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690381, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1899.879975] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690381, 'name': CreateVM_Task, 'duration_secs': 0.342013} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1899.880174] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1899.887411] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1899.887516] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1899.887859] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1899.888137] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8be293e-a4d0-4c21-a6cb-76d4c35b2743 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1899.893364] env[61215]: DEBUG oslo_vmware.api [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Waiting for the task: (returnval){
[ 1899.893364] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ac59c9-84be-ad06-9a50-a123d1668f0f"
[ 1899.893364] env[61215]: _type = "Task"
[ 1899.893364] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1899.901284] env[61215]: DEBUG oslo_vmware.api [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ac59c9-84be-ad06-9a50-a123d1668f0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1900.405689] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1900.405689] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1900.405689] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1901.147290] env[61215]: DEBUG nova.compute.manager [req-85652f33-a558-4d48-8069-c21bc8a93bf0 req-a9ef3147-f860-49dd-9c2b-eba816019e7e service nova] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Received event network-changed-f83cf4fa-4778-4beb-a82c-a718eceac784 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}}
[ 1901.147513] env[61215]: DEBUG nova.compute.manager [req-85652f33-a558-4d48-8069-c21bc8a93bf0 req-a9ef3147-f860-49dd-9c2b-eba816019e7e service nova] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Refreshing instance network info cache due to event network-changed-f83cf4fa-4778-4beb-a82c-a718eceac784. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}}
[ 1901.147744] env[61215]: DEBUG oslo_concurrency.lockutils [req-85652f33-a558-4d48-8069-c21bc8a93bf0 req-a9ef3147-f860-49dd-9c2b-eba816019e7e service nova] Acquiring lock "refresh_cache-38fe96cf-e570-4c0e-af6f-2f199efc06ff" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1901.147891] env[61215]: DEBUG oslo_concurrency.lockutils [req-85652f33-a558-4d48-8069-c21bc8a93bf0 req-a9ef3147-f860-49dd-9c2b-eba816019e7e service nova] Acquired lock "refresh_cache-38fe96cf-e570-4c0e-af6f-2f199efc06ff" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1901.148403] env[61215]: DEBUG nova.network.neutron [req-85652f33-a558-4d48-8069-c21bc8a93bf0 req-a9ef3147-f860-49dd-9c2b-eba816019e7e service nova] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Refreshing network info cache for port f83cf4fa-4778-4beb-a82c-a718eceac784 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1901.536108] env[61215]: DEBUG nova.network.neutron [req-85652f33-a558-4d48-8069-c21bc8a93bf0 req-a9ef3147-f860-49dd-9c2b-eba816019e7e service nova] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Updated VIF entry in instance network info cache for port f83cf4fa-4778-4beb-a82c-a718eceac784. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1901.536108] env[61215]: DEBUG nova.network.neutron [req-85652f33-a558-4d48-8069-c21bc8a93bf0 req-a9ef3147-f860-49dd-9c2b-eba816019e7e service nova] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Updating instance_info_cache with network_info: [{"id": "f83cf4fa-4778-4beb-a82c-a718eceac784", "address": "fa:16:3e:f1:74:95", "network": {"id": "2e1d3248-258a-4972-9b79-db19a1c10825", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1547109818-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a636e6a12bf245c9993b281dd617e167", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37333dc2-982e-45e9-9dda-0c18417d7fa6", "external-id": "nsx-vlan-transportzone-227", "segmentation_id": 227, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf83cf4fa-47", "ovs_interfaceid": "f83cf4fa-4778-4beb-a82c-a718eceac784", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1901.548957] env[61215]: DEBUG oslo_concurrency.lockutils [req-85652f33-a558-4d48-8069-c21bc8a93bf0 req-a9ef3147-f860-49dd-9c2b-eba816019e7e service nova] Releasing lock "refresh_cache-38fe96cf-e570-4c0e-af6f-2f199efc06ff" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1914.330581] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1916.654465] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1916.654465] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1918.654599] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1918.654822] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}}
[ 1918.654932] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 1918.680024] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 1918.680024] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 1918.680024] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 1918.680024] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 1918.680024] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 1918.680024] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 1918.680024] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 1918.680269] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 1918.680269] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 1918.680321] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 1918.680445] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}}
[ 1918.680975] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1918.681137] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}}
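The "Running periodic task ..." lines are emitted by oslo.service's periodic-task runner as it walks a registry of due tasks on the compute manager. A hypothetical reduction of that loop, standard library only (the scheduling details here are illustrative, not the oslo.service implementation):

import time

def run_periodic_tasks(tasks, tick=1.0):
    # tasks: iterable of (name, spacing_seconds, callable)
    last_run = {name: 0.0 for name, _, _ in tasks}
    while True:
        now = time.monotonic()
        for name, spacing, fn in tasks:
            if now - last_run[name] >= spacing:
                print("Running periodic task %s" % name)  # mirrors the records above
                last_run[name] = now
                fn()
        time.sleep(tick)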
[ 1919.654588] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1919.654811] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1920.651086] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1921.653664] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1921.653907] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1921.665076] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1921.665307] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1921.665473] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1921.665632] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1921.666830] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c8d70e-89b2-45fb-826d-3dd81d9d4b53 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1921.675675] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9835dae4-7456-49dd-8dd7-b57a873d2c8a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1921.690574] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a497956e-4372-40d2-8372-bd3143dc1141 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1921.696817] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-563e9cff-3c9d-4a9c-a522-c4d36a4bc717 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1921.724798] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181318MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1921.724938] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1921.725144] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1921.798055] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance d49f702b-cd29-4491-938c-0291b351ef20 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1921.798230] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1921.798362] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1921.798488] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8d4665c7-67de-4ab3-a8b7-596a5e1152ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1921.798605] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1921.798721] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 2e186217-c1e1-40c6-8d84-988f35f6b93d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1921.798838] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1921.798953] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c233ab81-232d-49be-a176-bf846f0d8cc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1921.799083] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bb56c470-9f85-44b1-b1ec-f44236e9de51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1921.799200] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 38fe96cf-e570-4c0e-af6f-2f199efc06ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1921.810684] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 12825ddf-86ee-4500-b43b-cf480dc54f3a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1921.821570] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a42577f4-29ba-446b-a561-745ff14d1696 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1921.831896] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e4cf3e92-a1a6-47ac-8625-37cdbf96cb35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1921.843371] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0d609df2-621c-456f-b8ce-a209e9052153 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1921.853076] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 66420497-c0f6-4f1d-86ee-23d53400e325 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1921.864096] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 59d93243-c15c-4554-863b-779d94b3d858 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1921.874025] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f3a3a510-a085-4388-b49d-b4371095b436 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1921.887304] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ea0fe7f8-9070-4b17-bc36-d65c62a18923 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1921.896708] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1921.907435] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance be5cf1b4-da97-4944-bb38-f10943576b8f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1921.917449] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3a7e93d3-cef2-4b3f-a20b-da373780e1c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 1921.917690] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1921.917836] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1922.156025] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae63e9f-059d-4936-ac51-69440e482f6b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1922.164047] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8dc0b8-e1f1-4b28-9bd1-66e1df5465ae {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1922.194563] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67476ee-24cd-490e-aaa1-283a6bfbe1d1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1922.201876] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc77154-6eff-4987-be9a-a69c0941dd70 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1922.214749] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1922.222948] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1922.238703] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1922.238881] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.514s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1928.793731] env[61215]: DEBUG oslo_concurrency.lockutils [None req-56d03437-a8f9-46fd-a343-b1168a0e2e07 tempest-InstanceActionsV221TestJSON-1482815884 tempest-InstanceActionsV221TestJSON-1482815884-project-member] Acquiring lock "90f1ad20-b501-4f1e-95ff-1d428c51c242" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1928.794113] env[61215]: DEBUG oslo_concurrency.lockutils [None req-56d03437-a8f9-46fd-a343-b1168a0e2e07 tempest-InstanceActionsV221TestJSON-1482815884 tempest-InstanceActionsV221TestJSON-1482815884-project-member] Lock "90f1ad20-b501-4f1e-95ff-1d428c51c242" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1934.272522] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58cf2928-028f-4efd-acf6-6aa48060bdd0 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "02e32086-8fe7-4def-ac71-7c4c43ee0f23" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1934.272522] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58cf2928-028f-4efd-acf6-6aa48060bdd0 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "02e32086-8fe7-4def-ac71-7c4c43ee0f23" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1936.208094] env[61215]: WARNING oslo_vmware.rw_handles [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1936.208094] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1936.208094] env[61215]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1936.208094] env[61215]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 1936.208094] env[61215]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1936.208094] env[61215]: ERROR oslo_vmware.rw_handles     response.begin()
[ 1936.208094] env[61215]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1936.208094] env[61215]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 1936.208094] env[61215]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1936.208094] env[61215]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 1936.208094] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1936.208094] env[61215]: ERROR oslo_vmware.rw_handles
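The WARNING and traceback above occur when the vCenter side closes the image-transfer connection before sending a response: getresponse() then raises http.client.RemoteDisconnected inside the handle's close(), and, as the next records show, the transfer code logs the error and carries on (the image is still reported as downloaded). A standard-library sketch of tolerating that hang-up on close (hypothetical helper, not oslo.vmware's rw_handles):

import http.client

def finish_upload(conn):
    # conn: an http.client.HTTPSConnection whose request body was already sent.
    try:
        conn.getresponse().read()
    except http.client.RemoteDisconnected as exc:
        # The server hung up without a status line; log and continue,
        # as the WARNING record above does.
        print("Error occurred while reading the HTTP response.: %s" % exc)
    finally:
        conn.close()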
[ 1936.208599] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/f530f583-c92a-4f9a-85d8-9151df27df7d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1936.210580] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1936.210822] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Copying Virtual Disk [datastore1] vmware_temp/f530f583-c92a-4f9a-85d8-9151df27df7d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/f530f583-c92a-4f9a-85d8-9151df27df7d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1936.211115] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5be5f65-3e19-45e3-96b7-e64c56f25fa1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1936.219620] env[61215]: DEBUG oslo_vmware.api [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Waiting for the task: (returnval){
[ 1936.219620] env[61215]: value = "task-1690382"
[ 1936.219620] env[61215]: _type = "Task"
[ 1936.219620] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1936.229788] env[61215]: DEBUG oslo_vmware.api [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Task: {'id': task-1690382, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1936.730164] env[61215]: DEBUG oslo_vmware.exceptions [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1936.730460] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1936.731053] env[61215]: ERROR nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1936.731053] env[61215]: Faults: ['InvalidArgument']
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] Traceback (most recent call last):
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]   File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]     yield resources
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]   File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]     self.driver.spawn(context, instance, image_meta,
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]     self._fetch_image_if_missing(context, vi)
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]     image_cache(vi, tmp_image_ds_loc)
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]     vm_util.copy_virtual_disk(
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]     session._wait_for_task(vmdk_copy_task)
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]     return self.wait_for_task(task_ref)
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]     return evt.wait()
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]     result = hub.switch()
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]     return self.greenlet.switch()
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]     self.f(*self.args, **self.kw)
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]     raise exceptions.translate_fault(task_info.error)
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] Faults: ['InvalidArgument']
[ 1936.731053] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20]
[ 1936.731893] env[61215]: INFO nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Terminating instance
[ 1936.733129] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1936.733354] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1936.734027] env[61215]: DEBUG nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 1936.734226] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1936.734456] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d03e98d0-7859-4600-87de-689afe8f3fc3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1936.736664] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184babbc-21a9-479a-9b01-af72712dc5d4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1936.743938] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1936.744170] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55131a8c-0443-4fa4-b236-c49ab147978e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1936.746404] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1936.746583] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Folder [datastore1] devstack-image-cache_base created.
{{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1936.747528] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cfac0b1-b63a-422e-9d71-5af9b9ddeb0e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.752183] env[61215]: DEBUG oslo_vmware.api [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Waiting for the task: (returnval){ [ 1936.752183] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]521b93aa-589d-a88e-d1d4-10335bd04ad7" [ 1936.752183] env[61215]: _type = "Task" [ 1936.752183] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.759353] env[61215]: DEBUG oslo_vmware.api [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]521b93aa-589d-a88e-d1d4-10335bd04ad7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.895173] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1936.895414] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1936.895611] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Deleting the datastore file [datastore1] d49f702b-cd29-4491-938c-0291b351ef20 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1936.895871] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a800bd8-61b5-4462-a2f8-26e969d35eda {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.902491] env[61215]: DEBUG oslo_vmware.api [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Waiting for the task: (returnval){ [ 1936.902491] env[61215]: value = "task-1690384" [ 1936.902491] env[61215]: _type = "Task" [ 1936.902491] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.909832] env[61215]: DEBUG oslo_vmware.api [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Task: {'id': task-1690384, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.262773] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1937.263069] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Creating directory with path [datastore1] vmware_temp/9fa0c294-d73f-4e0c-8d0b-787744d36613/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1937.263298] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82aaeabc-3a80-42a6-bb8f-485c8c70f5a4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.274605] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Created directory with path [datastore1] vmware_temp/9fa0c294-d73f-4e0c-8d0b-787744d36613/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1937.274867] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Fetch image to [datastore1] vmware_temp/9fa0c294-d73f-4e0c-8d0b-787744d36613/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1937.275066] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/9fa0c294-d73f-4e0c-8d0b-787744d36613/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1937.275805] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a6d5f1-bedb-4346-89d5-a7f2166b9182 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.282317] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75afc805-4c21-4b88-990f-38e280dd0ddc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.291301] env[61215]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd09864-04c2-4266-a40d-bb9112c05578 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.320841] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b8b531-75d0-4a62-8d71-7142bb609d43 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.326091] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9d791f51-af5b-4f44-9af4-80b8dab64bf3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.350259] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1937.413962] env[61215]: DEBUG oslo_vmware.api [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Task: {'id': task-1690384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082601} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.414213] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1937.414404] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1937.414576] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1937.414779] env[61215]: INFO nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Took 0.68 seconds to destroy the instance on the hypervisor. 
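
The two task records above (progress 0%, then "completed successfully" with duration_secs) trace oslo.vmware's wait_for_task/_poll_task pattern: the task object is re-read on a fixed interval until it reports a terminal state, and an 'error' state is translated into a raised fault. A minimal sketch of that loop, where get_task_info is a hypothetical stand-in for the property reads the library actually performs, not its real API:

import time


class TaskFailed(Exception):
    """Terminal 'error' state, e.g. the InvalidArgument fault seen above."""


def wait_for_task(get_task_info, task_ref, interval=0.5):
    """Poll task_ref until it reaches a terminal state."""
    while True:
        info = get_task_info(task_ref)  # hypothetical helper: one property read per poll
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            # cf. "A specified parameter was not correct: fileType"
            raise TaskFailed(info['error'])
        print(f"Task: {task_ref} progress is {info.get('progress', 0)}%")
        time.sleep(interval)
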
[ 1937.416850] env[61215]: DEBUG nova.compute.claims [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1937.417045] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.417617] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.504998] env[61215]: DEBUG oslo_vmware.rw_handles [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9fa0c294-d73f-4e0c-8d0b-787744d36613/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1937.565775] env[61215]: DEBUG oslo_vmware.rw_handles [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1937.566038] env[61215]: DEBUG oslo_vmware.rw_handles [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9fa0c294-d73f-4e0c-8d0b-787744d36613/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1939.255490] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89860a0-34ee-41b7-ad2b-ddfd0581439f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.263427] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a6295f-ff3f-4a62-88d6-e109a22e96aa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.294261] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6f7ffb-538c-4feb-b131-438ddb064435 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.302067] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b2c049-d75e-43bd-9550-33a239e83360 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.317414] env[61215]: DEBUG nova.compute.provider_tree [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1939.323905] env[61215]: DEBUG nova.scheduler.client.report [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1939.338129] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.921s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.338680] env[61215]: ERROR nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1939.338680] env[61215]: Faults: ['InvalidArgument'] [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] Traceback (most recent call last): [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] File 
"/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] self.driver.spawn(context, instance, image_meta, [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] self._fetch_image_if_missing(context, vi) [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] image_cache(vi, tmp_image_ds_loc) [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] vm_util.copy_virtual_disk( [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] session._wait_for_task(vmdk_copy_task) [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] return self.wait_for_task(task_ref) [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] return evt.wait() [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] result = hub.switch() [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] return self.greenlet.switch() [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] self.f(*self.args, **self.kw) [ 1939.338680] 
env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] raise exceptions.translate_fault(task_info.error) [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] Faults: ['InvalidArgument'] [ 1939.338680] env[61215]: ERROR nova.compute.manager [instance: d49f702b-cd29-4491-938c-0291b351ef20] [ 1939.339475] env[61215]: DEBUG nova.compute.utils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1939.340853] env[61215]: DEBUG nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Build of instance d49f702b-cd29-4491-938c-0291b351ef20 was re-scheduled: A specified parameter was not correct: fileType [ 1939.340853] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1939.341244] env[61215]: DEBUG nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1939.341421] env[61215]: DEBUG nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1939.341636] env[61215]: DEBUG nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1939.341819] env[61215]: DEBUG nova.network.neutron [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1939.689756] env[61215]: DEBUG nova.network.neutron [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1939.707931] env[61215]: INFO nova.compute.manager [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Took 0.37 seconds to deallocate network for instance. [ 1939.810795] env[61215]: INFO nova.scheduler.client.report [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Deleted allocations for instance d49f702b-cd29-4491-938c-0291b351ef20 [ 1939.833796] env[61215]: DEBUG oslo_concurrency.lockutils [None req-85570b98-6817-4ab8-8ef8-fa2b74fea28f tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Lock "d49f702b-cd29-4491-938c-0291b351ef20" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 657.699s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.834089] env[61215]: DEBUG oslo_concurrency.lockutils [None req-361d7434-e052-4d23-b682-f02d37c50b7c tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Lock "d49f702b-cd29-4491-938c-0291b351ef20" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 458.667s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.834332] env[61215]: DEBUG oslo_concurrency.lockutils [None req-361d7434-e052-4d23-b682-f02d37c50b7c tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Acquiring lock "d49f702b-cd29-4491-938c-0291b351ef20-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.834547] env[61215]: DEBUG oslo_concurrency.lockutils [None req-361d7434-e052-4d23-b682-f02d37c50b7c tempest-ServersWithSpecificFlavorTestJSON-877201460
tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Lock "d49f702b-cd29-4491-938c-0291b351ef20-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.834765] env[61215]: DEBUG oslo_concurrency.lockutils [None req-361d7434-e052-4d23-b682-f02d37c50b7c tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Lock "d49f702b-cd29-4491-938c-0291b351ef20-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.837352] env[61215]: INFO nova.compute.manager [None req-361d7434-e052-4d23-b682-f02d37c50b7c tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Terminating instance [ 1939.839437] env[61215]: DEBUG nova.compute.manager [None req-361d7434-e052-4d23-b682-f02d37c50b7c tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1939.839656] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-361d7434-e052-4d23-b682-f02d37c50b7c tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1939.840159] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aac2f3ab-1ee7-472d-8c78-e67d3842b276 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.849150] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c45311-3a93-4f60-8f27-3b88726f5128 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.860077] env[61215]: DEBUG nova.compute.manager [None req-21e8726b-da04-4770-9ccd-271b724f3f36 tempest-ServerRescueTestJSONUnderV235-11277943 tempest-ServerRescueTestJSONUnderV235-11277943-project-member] [instance: f91efd4b-851e-44bc-9cf2-7be8a2d2d7df] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1939.879923] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-361d7434-e052-4d23-b682-f02d37c50b7c tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d49f702b-cd29-4491-938c-0291b351ef20 could not be found.
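
That WARNING is the destroy path tolerating a VM the earlier failed spawn already unregistered: the backend lookup raises InstanceNotFound, the exception is swallowed, and terminate continues (hence the 0.04-second destroy just below). A rough sketch of that behaviour, with find_vm_ref and unregister_vm as illustrative names rather than Nova's actual helpers:

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_instance(find_vm_ref, unregister_vm, instance_uuid, log):
    """Tear down the backing VM; tolerate it already being gone."""
    try:
        vm_ref = find_vm_ref(instance_uuid)  # cf. SearchIndex.FindAllByUuid above
        unregister_vm(vm_ref)
    except InstanceNotFound:
        # Nothing left on the hypervisor, so the destroy is a no-op here;
        # network deallocation and allocation cleanup still run afterwards.
        log(f"Instance {instance_uuid} does not exist on backend")
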
[ 1939.880148] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-361d7434-e052-4d23-b682-f02d37c50b7c tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1939.880331] env[61215]: INFO nova.compute.manager [None req-361d7434-e052-4d23-b682-f02d37c50b7c tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1939.880605] env[61215]: DEBUG oslo.service.loopingcall [None req-361d7434-e052-4d23-b682-f02d37c50b7c tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1939.880839] env[61215]: DEBUG nova.compute.manager [-] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1939.880941] env[61215]: DEBUG nova.network.neutron [-] [instance: d49f702b-cd29-4491-938c-0291b351ef20] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1939.897854] env[61215]: DEBUG nova.compute.manager [None req-21e8726b-da04-4770-9ccd-271b724f3f36 tempest-ServerRescueTestJSONUnderV235-11277943 tempest-ServerRescueTestJSONUnderV235-11277943-project-member] [instance: f91efd4b-851e-44bc-9cf2-7be8a2d2d7df] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1939.906696] env[61215]: DEBUG nova.network.neutron [-] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1939.913768] env[61215]: INFO nova.compute.manager [-] [instance: d49f702b-cd29-4491-938c-0291b351ef20] Took 0.03 seconds to deallocate network for instance. [ 1939.919141] env[61215]: DEBUG oslo_concurrency.lockutils [None req-21e8726b-da04-4770-9ccd-271b724f3f36 tempest-ServerRescueTestJSONUnderV235-11277943 tempest-ServerRescueTestJSONUnderV235-11277943-project-member] Lock "f91efd4b-851e-44bc-9cf2-7be8a2d2d7df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 236.144s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.927914] env[61215]: DEBUG nova.compute.manager [None req-11da0ca4-84ac-45fc-8239-2771138848fa tempest-ServerPasswordTestJSON-753560478 tempest-ServerPasswordTestJSON-753560478-project-member] [instance: 0576d0b5-3890-4e1d-b208-40d46c2fdae7] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1939.959056] env[61215]: DEBUG nova.compute.manager [None req-11da0ca4-84ac-45fc-8239-2771138848fa tempest-ServerPasswordTestJSON-753560478 tempest-ServerPasswordTestJSON-753560478-project-member] [instance: 0576d0b5-3890-4e1d-b208-40d46c2fdae7] Instance disappeared before build.
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1939.979785] env[61215]: DEBUG oslo_concurrency.lockutils [None req-11da0ca4-84ac-45fc-8239-2771138848fa tempest-ServerPasswordTestJSON-753560478 tempest-ServerPasswordTestJSON-753560478-project-member] Lock "0576d0b5-3890-4e1d-b208-40d46c2fdae7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 230.621s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.988707] env[61215]: DEBUG nova.compute.manager [None req-d898448a-6b40-4644-acae-d0e0930ae79a tempest-ServerShowV257Test-1987766564 tempest-ServerShowV257Test-1987766564-project-member] [instance: bf807d62-c8be-4819-9fc1-4b2d6d14cc39] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1940.018384] env[61215]: DEBUG nova.compute.manager [None req-d898448a-6b40-4644-acae-d0e0930ae79a tempest-ServerShowV257Test-1987766564 tempest-ServerShowV257Test-1987766564-project-member] [instance: bf807d62-c8be-4819-9fc1-4b2d6d14cc39] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1940.034809] env[61215]: DEBUG oslo_concurrency.lockutils [None req-361d7434-e052-4d23-b682-f02d37c50b7c tempest-ServersWithSpecificFlavorTestJSON-877201460 tempest-ServersWithSpecificFlavorTestJSON-877201460-project-member] Lock "d49f702b-cd29-4491-938c-0291b351ef20" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.201s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.035642] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "d49f702b-cd29-4491-938c-0291b351ef20" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 107.069s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.035846] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: d49f702b-cd29-4491-938c-0291b351ef20] During sync_power_state the instance has a pending task (deleting). Skip. [ 1940.036043] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "d49f702b-cd29-4491-938c-0291b351ef20" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.044918] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d898448a-6b40-4644-acae-d0e0930ae79a tempest-ServerShowV257Test-1987766564 tempest-ServerShowV257Test-1987766564-project-member] Lock "bf807d62-c8be-4819-9fc1-4b2d6d14cc39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 224.343s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.056455] env[61215]: DEBUG nova.compute.manager [None req-8d522f22-dbde-4362-b0a2-aeb7cde90dd3 tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] [instance: 12825ddf-86ee-4500-b43b-cf480dc54f3a] Starting instance...
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1940.084210] env[61215]: DEBUG nova.compute.manager [None req-8d522f22-dbde-4362-b0a2-aeb7cde90dd3 tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] [instance: 12825ddf-86ee-4500-b43b-cf480dc54f3a] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1940.104787] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8d522f22-dbde-4362-b0a2-aeb7cde90dd3 tempest-AttachInterfacesTestJSON-1825426080 tempest-AttachInterfacesTestJSON-1825426080-project-member] Lock "12825ddf-86ee-4500-b43b-cf480dc54f3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 214.725s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.114008] env[61215]: DEBUG nova.compute.manager [None req-e4c48f72-89b3-444c-b557-7d91448a2997 tempest-ServerShowV254Test-703576172 tempest-ServerShowV254Test-703576172-project-member] [instance: a42577f4-29ba-446b-a561-745ff14d1696] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1940.138952] env[61215]: DEBUG nova.compute.manager [None req-e4c48f72-89b3-444c-b557-7d91448a2997 tempest-ServerShowV254Test-703576172 tempest-ServerShowV254Test-703576172-project-member] [instance: a42577f4-29ba-446b-a561-745ff14d1696] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1940.162558] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e4c48f72-89b3-444c-b557-7d91448a2997 tempest-ServerShowV254Test-703576172 tempest-ServerShowV254Test-703576172-project-member] Lock "a42577f4-29ba-446b-a561-745ff14d1696" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 206.654s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.175269] env[61215]: DEBUG nova.compute.manager [None req-7a7245fd-f0e5-4f35-b608-3d0e4bb63c3c tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: e4cf3e92-a1a6-47ac-8625-37cdbf96cb35] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1940.198727] env[61215]: DEBUG nova.compute.manager [None req-7a7245fd-f0e5-4f35-b608-3d0e4bb63c3c tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: e4cf3e92-a1a6-47ac-8625-37cdbf96cb35] Instance disappeared before build.
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 1940.223224] env[61215]: DEBUG oslo_concurrency.lockutils [None req-7a7245fd-f0e5-4f35-b608-3d0e4bb63c3c tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "e4cf3e92-a1a6-47ac-8625-37cdbf96cb35" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 203.418s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.233529] env[61215]: DEBUG nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1940.297457] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.297716] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.299237] env[61215]: INFO nova.compute.claims [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1940.663293] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14cbe9ab-7f1b-482e-88e6-062ab618b577 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.670906] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700c8e12-7602-4a4e-bd51-c1052ef10b80 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.700781] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f8c504-9bfe-4c21-a46a-0523f1c26b29 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.707868] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e60867f-dcb2-43fc-a978-514f5ddfce9f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.721646] env[61215]: DEBUG nova.compute.provider_tree [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215)
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1940.729963] env[61215]: DEBUG nova.scheduler.client.report [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1940.744722] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.447s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.745219] env[61215]: DEBUG nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1940.777064] env[61215]: DEBUG nova.compute.utils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1940.778767] env[61215]: DEBUG nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 1940.778968] env[61215]: DEBUG nova.network.neutron [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1940.787718] env[61215]: DEBUG nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Start building block device mappings for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1940.852584] env[61215]: DEBUG nova.policy [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72f531618b8e4fa9b97ff04c732827c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '714a816bd7c142c096b091efbaccddcb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 1940.862498] env[61215]: DEBUG nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1940.895039] env[61215]: DEBUG nova.virt.hardware [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1940.895306] env[61215]: DEBUG nova.virt.hardware [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1940.895471] env[61215]: DEBUG nova.virt.hardware [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1940.895658] env[61215]: DEBUG nova.virt.hardware [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1940.895814] env[61215]: DEBUG nova.virt.hardware [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1940.895966] env[61215]: DEBUG nova.virt.hardware [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1940.896205] env[61215]: DEBUG nova.virt.hardware [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1940.896367] env[61215]: DEBUG nova.virt.hardware [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1940.896540] env[61215]: DEBUG nova.virt.hardware [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1940.896712] env[61215]: DEBUG nova.virt.hardware [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1940.896891] env[61215]: DEBUG nova.virt.hardware [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1940.898067] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932507c2-f408-4e4a-a722-205d22095be4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.906608] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573b6ff3-b5fa-41a6-8117-fc67235f4e18 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.235668] env[61215]: DEBUG nova.network.neutron [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Successfully created port: ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1941.954276] env[61215]: DEBUG nova.compute.manager [req-4a9c4313-f0f9-47ab-813a-957a7d935482 req-f04ae703-2467-4725-8238-1b1fd4d2a13d service nova] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Received event network-vif-plugged-ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae {{(pid=61215) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11123}} [ 1941.954527] env[61215]: DEBUG oslo_concurrency.lockutils [req-4a9c4313-f0f9-47ab-813a-957a7d935482 req-f04ae703-2467-4725-8238-1b1fd4d2a13d service nova] Acquiring lock "0d609df2-621c-456f-b8ce-a209e9052153-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.954724] env[61215]: DEBUG oslo_concurrency.lockutils [req-4a9c4313-f0f9-47ab-813a-957a7d935482 req-f04ae703-2467-4725-8238-1b1fd4d2a13d service nova] Lock "0d609df2-621c-456f-b8ce-a209e9052153-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.954879] env[61215]: DEBUG oslo_concurrency.lockutils [req-4a9c4313-f0f9-47ab-813a-957a7d935482 req-f04ae703-2467-4725-8238-1b1fd4d2a13d service nova] Lock "0d609df2-621c-456f-b8ce-a209e9052153-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.955063] env[61215]: DEBUG nova.compute.manager [req-4a9c4313-f0f9-47ab-813a-957a7d935482 req-f04ae703-2467-4725-8238-1b1fd4d2a13d service nova] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] No waiting events found dispatching network-vif-plugged-ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1941.955248] env[61215]: WARNING nova.compute.manager [req-4a9c4313-f0f9-47ab-813a-957a7d935482 req-f04ae703-2467-4725-8238-1b1fd4d2a13d service nova] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Received unexpected event network-vif-plugged-ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae for instance with vm_state building and task_state spawning. 
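[annotation] The WARNING just above ("Received unexpected event network-vif-plugged-... for instance with vm_state building") comes from Nova's per-instance event dispatch: the compute manager registers a waiter before an operation that should trigger a Neutron notification, and an arriving notification either wakes that waiter or is logged as unexpected. The sketch below is a minimal, hypothetical illustration of that prepare/pop/wake pattern under a per-instance lock; the class and method names (InstanceEvents, prepare_event, pop_event) are stand-ins for nova.compute.manager.InstanceEvents, not its actual code.

    # Simplified illustration of the event dispatch visible in this log.
    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "<uuid>-events" lock
            self._events = {}               # (instance_uuid, event_tag) -> threading.Event

        def prepare_event(self, instance_uuid, tag):
            # Registered before the operation (e.g. port binding) that will
            # eventually cause Neutron to send the notification.
            ev = threading.Event()
            with self._lock:
                self._events[(instance_uuid, tag)] = ev
            return ev

        def pop_event(self, instance_uuid, tag):
            # Called when the external notification arrives; returns the
            # waiter if one was registered, else None.
            with self._lock:
                return self._events.pop((instance_uuid, tag), None)

    events = InstanceEvents()

    def external_instance_event(instance_uuid, tag):
        ev = events.pop_event(instance_uuid, tag)
        if ev is None:
            # Corresponds to the WARNING above: the event raced ahead of
            # any waiter, so there is nothing to dispatch to.
            print(f"Received unexpected event {tag} for instance {instance_uuid}")
        else:
            ev.set()  # wake the thread blocked on ev.wait()

    external_instance_event("0d609df2-621c-456f-b8ce-a209e9052153",
                            "network-vif-plugged-ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae")

[/annotation]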
[ 1942.116632] env[61215]: DEBUG nova.network.neutron [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Successfully updated port: ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1942.134116] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Acquiring lock "refresh_cache-0d609df2-621c-456f-b8ce-a209e9052153" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1942.134276] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Acquired lock "refresh_cache-0d609df2-621c-456f-b8ce-a209e9052153" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1942.134428] env[61215]: DEBUG nova.network.neutron [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1942.200284] env[61215]: DEBUG nova.network.neutron [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1942.380944] env[61215]: DEBUG nova.network.neutron [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Updating instance_info_cache with network_info: [{"id": "ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae", "address": "fa:16:3e:b5:a9:2a", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.233", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad274a7b-6a", "ovs_interfaceid": "ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.393802] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Releasing lock "refresh_cache-0d609df2-621c-456f-b8ce-a209e9052153" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1942.394187] env[61215]: DEBUG nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Instance network_info: |[{"id": "ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae", "address": "fa:16:3e:b5:a9:2a", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.233", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad274a7b-6a", "ovs_interfaceid": "ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1942.394652] 
env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:a9:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c2383c27-232e-4745-9b0a-2dcbaabb188b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1942.402095] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Creating folder: Project (714a816bd7c142c096b091efbaccddcb). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1942.402679] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c4b8cec-6d53-4f83-85e3-1752810820a2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.413616] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Created folder: Project (714a816bd7c142c096b091efbaccddcb) in parent group-v352463. [ 1942.413797] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Creating folder: Instances. Parent ref: group-v352539. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1942.414414] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17566208-6e3d-4d0c-a1d1-7449f39a6b7f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.423134] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Created folder: Instances in parent group-v352539. [ 1942.423382] env[61215]: DEBUG oslo.service.loopingcall [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1942.423573] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1942.423941] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7cabda4c-f54a-4fa0-84a7-366f33546243 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.444457] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1942.444457] env[61215]: value = "task-1690387" [ 1942.444457] env[61215]: _type = "Task" [ 1942.444457] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.452425] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690387, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.079964] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690387, 'name': CreateVM_Task, 'duration_secs': 0.306839} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.079964] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1946.079964] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.079964] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.079964] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1946.079964] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd738ff2-89e1-4796-8dde-f1499e479400 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.079964] env[61215]: DEBUG oslo_vmware.api [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Waiting for the task: (returnval){ [ 1946.079964] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ba54c7-7764-1bce-e6db-d1e441db53fc" [ 1946.079964] env[61215]: _type = "Task" [ 1946.079964] 
env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.079964] env[61215]: DEBUG oslo_vmware.api [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ba54c7-7764-1bce-e6db-d1e441db53fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.079964] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.079964] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1946.079964] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.554161] env[61215]: DEBUG nova.compute.manager [req-09f33184-9631-47a6-905f-f27328c4a8af req-d809cdd7-e60c-4335-89e3-656d71a1db00 service nova] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Received event network-changed-ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 1946.554161] env[61215]: DEBUG nova.compute.manager [req-09f33184-9631-47a6-905f-f27328c4a8af req-d809cdd7-e60c-4335-89e3-656d71a1db00 service nova] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Refreshing instance network info cache due to event network-changed-ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 1946.554161] env[61215]: DEBUG oslo_concurrency.lockutils [req-09f33184-9631-47a6-905f-f27328c4a8af req-d809cdd7-e60c-4335-89e3-656d71a1db00 service nova] Acquiring lock "refresh_cache-0d609df2-621c-456f-b8ce-a209e9052153" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.554161] env[61215]: DEBUG oslo_concurrency.lockutils [req-09f33184-9631-47a6-905f-f27328c4a8af req-d809cdd7-e60c-4335-89e3-656d71a1db00 service nova] Acquired lock "refresh_cache-0d609df2-621c-456f-b8ce-a209e9052153" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.554161] env[61215]: DEBUG nova.network.neutron [req-09f33184-9631-47a6-905f-f27328c4a8af req-d809cdd7-e60c-4335-89e3-656d71a1db00 service nova] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Refreshing network info cache for port ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1946.834904] env[61215]: DEBUG nova.network.neutron [req-09f33184-9631-47a6-905f-f27328c4a8af req-d809cdd7-e60c-4335-89e3-656d71a1db00 service nova] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Updated VIF entry in instance network info cache for port ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1946.835272] env[61215]: DEBUG nova.network.neutron [req-09f33184-9631-47a6-905f-f27328c4a8af req-d809cdd7-e60c-4335-89e3-656d71a1db00 service nova] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Updating instance_info_cache with network_info: [{"id": "ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae", "address": "fa:16:3e:b5:a9:2a", "network": {"id": "7eeb68c4-c3ff-4e6f-b7f9-6320ce2d34b3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.233", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "9bc3f57c82894c5d9e08b66e77a25dc5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c2383c27-232e-4745-9b0a-2dcbaabb188b", "external-id": "nsx-vlan-transportzone-350", "segmentation_id": 350, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad274a7b-6a", "ovs_interfaceid": "ad274a7b-6adc-49b6-bc0f-a900cfd7b3ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.845566] env[61215]: DEBUG oslo_concurrency.lockutils [req-09f33184-9631-47a6-905f-f27328c4a8af req-d809cdd7-e60c-4335-89e3-656d71a1db00 service nova] Releasing lock "refresh_cache-0d609df2-621c-456f-b8ce-a209e9052153" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1953.391281] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Acquiring lock 
"fb5fb791-5f62-4717-8d8f-7d56ffda15be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.391624] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.237735] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1977.654244] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1978.655070] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.655138] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.655554] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 1979.655554] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1979.678672] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1979.678851] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1979.678960] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1979.679104] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1979.679235] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1979.679359] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1979.679485] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1979.679595] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1979.679711] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1979.679872] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1979.680020] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 1979.680498] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.681014] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 1980.654482] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1980.654714] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1981.654174] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1981.654572] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1981.666613] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.666822] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.666990] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.667169] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1981.668288] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b20d52-84c3-415d-90eb-5cdef12cceb5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.677007] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739f60e1-eca9-4e07-b509-af2226cc1787 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.693462] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f844ce8b-f3e8-4820-b1e4-fbf0960a87a5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.699721] env[61215]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0193eb81-2ffa-483c-9e5f-3effa07797cd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.727883] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181309MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1981.728024] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.728215] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.800706] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.800870] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.801010] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8d4665c7-67de-4ab3-a8b7-596a5e1152ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.801207] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.801341] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 2e186217-c1e1-40c6-8d84-988f35f6b93d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.801465] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.801585] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c233ab81-232d-49be-a176-bf846f0d8cc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.801703] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bb56c470-9f85-44b1-b1ec-f44236e9de51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.801849] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 38fe96cf-e570-4c0e-af6f-2f199efc06ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.802008] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0d609df2-621c-456f-b8ce-a209e9052153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1981.812337] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 66420497-c0f6-4f1d-86ee-23d53400e325 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1981.822036] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 59d93243-c15c-4554-863b-779d94b3d858 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1981.830943] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f3a3a510-a085-4388-b49d-b4371095b436 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1981.839952] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ea0fe7f8-9070-4b17-bc36-d65c62a18923 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1981.848813] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1981.857250] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance be5cf1b4-da97-4944-bb38-f10943576b8f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1981.882401] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3a7e93d3-cef2-4b3f-a20b-da373780e1c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1981.893188] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 90f1ad20-b501-4f1e-95ff-1d428c51c242 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1981.902238] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02e32086-8fe7-4def-ac71-7c4c43ee0f23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1981.910905] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb5fb791-5f62-4717-8d8f-7d56ffda15be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1981.911117] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1981.911270] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1982.143401] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880992f5-bbd6-42c4-82c1-d31f09606fc3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.151772] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97e4167-06ae-49a1-ab37-e1a56969bbd6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.184070] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6c89a6-8a46-4fc5-87b8-257d675178ef {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.191622] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fbf2cd-deda-4ed4-99fb-15c69c48001d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.204841] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1982.220058] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1982.234171] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1982.234356] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.506s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.584852] env[61215]: WARNING oslo_vmware.rw_handles [None 
req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1983.584852] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1983.584852] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1983.584852] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1983.584852] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1983.584852] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 1983.584852] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1983.584852] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1983.584852] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1983.584852] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1983.584852] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1983.584852] env[61215]: ERROR oslo_vmware.rw_handles [ 1983.585386] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/9fa0c294-d73f-4e0c-8d0b-787744d36613/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1983.587749] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1983.587992] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Copying Virtual Disk [datastore1] vmware_temp/9fa0c294-d73f-4e0c-8d0b-787744d36613/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/9fa0c294-d73f-4e0c-8d0b-787744d36613/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1983.588293] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38eb999a-343c-4039-b492-06507d2d09d2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.596894] env[61215]: DEBUG oslo_vmware.api [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Waiting for the task: (returnval){ [ 1983.596894] env[61215]: value = "task-1690388" [ 1983.596894] env[61215]: _type = "Task" [ 1983.596894] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.605108] env[61215]: DEBUG oslo_vmware.api [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Task: {'id': task-1690388, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.107047] env[61215]: DEBUG oslo_vmware.exceptions [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1984.107360] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.107905] env[61215]: ERROR nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1984.107905] env[61215]: Faults: ['InvalidArgument'] [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Traceback (most recent call last): [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] yield resources [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] self.driver.spawn(context, instance, image_meta, [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] self._fetch_image_if_missing(context, vi) [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] image_cache(vi, tmp_image_ds_loc) [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: 
fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] vm_util.copy_virtual_disk( [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] session._wait_for_task(vmdk_copy_task) [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] return self.wait_for_task(task_ref) [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] return evt.wait() [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] result = hub.switch() [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] return self.greenlet.switch() [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] self.f(*self.args, **self.kw) [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] raise exceptions.translate_fault(task_info.error) [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Faults: ['InvalidArgument'] [ 1984.107905] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] [ 1984.108889] env[61215]: INFO nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Terminating instance [ 1984.109851] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.110816] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1984.110816] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1fb0048c-9127-42d6-969c-5674bc27bb19 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.112712] env[61215]: DEBUG nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1984.112905] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1984.113696] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282413eb-8b86-4490-83df-65d10ae26074 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.120408] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1984.120625] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89cbabd8-b2db-49af-9390-728aab79fc94 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.123666] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1984.123666] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1984.123877] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a320f1b-b66e-47c3-a177-01c859e92bdf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.128288] env[61215]: DEBUG oslo_vmware.api [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Waiting for the task: (returnval){ [ 1984.128288] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52219d9e-aac2-156a-d37b-2de61ea4bc13" [ 1984.128288] env[61215]: _type = "Task" [ 1984.128288] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.135877] env[61215]: DEBUG oslo_vmware.api [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52219d9e-aac2-156a-d37b-2de61ea4bc13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.602349] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1984.602629] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1984.602747] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Deleting the datastore file [datastore1] fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1984.603078] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2254718d-1b7a-4568-9ea3-6879ac249d9f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.609492] env[61215]: DEBUG oslo_vmware.api [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Waiting for the task: (returnval){ [ 1984.609492] env[61215]: value = "task-1690390" [ 1984.609492] env[61215]: _type = "Task" [ 1984.609492] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.636604] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1984.636812] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Creating directory with path [datastore1] vmware_temp/2a9c13a3-8ae3-4c45-a645-3326d393daa8/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1984.637057] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-825404f7-c3da-4fc8-8c3f-3961b9cfb4d2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.657571] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Created directory with path [datastore1] vmware_temp/2a9c13a3-8ae3-4c45-a645-3326d393daa8/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1984.657801] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Fetch image to [datastore1] vmware_temp/2a9c13a3-8ae3-4c45-a645-3326d393daa8/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1984.657996] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/2a9c13a3-8ae3-4c45-a645-3326d393daa8/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1984.658791] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6df04eb-26e3-44f2-aa4f-8ea5c493f4ca {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.665538] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4e3e80-c10a-4eec-a53a-17621afbcafc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.674885] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ab5454-3e22-450b-88e1-e798d440cc8a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.705355] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3eb4c458-82a9-4742-bfd7-ee9073fa1f6b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.711236] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-78eb9c51-f4bf-4e0f-a85d-f1db24699b70 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.730514] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1984.922196] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.923876] env[61215]: ERROR nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Traceback (most recent call last): [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] result = getattr(controller, method)(*args, **kwargs) [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self._get(image_id) [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] resp, body = self.http_client.get(url, 
headers=header) [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self.request(url, 'GET', **kwargs) [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self._handle_response(resp) [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise exc.from_response(resp, resp.content) [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] During handling of the above exception, another exception occurred: [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Traceback (most recent call last): [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] yield resources [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self.driver.spawn(context, instance, image_meta, [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._fetch_image_if_missing(context, vi) [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 
3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] image_fetch(context, vi, tmp_image_ds_loc) [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] images.fetch_image( [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1984.923876] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] metadata = IMAGE_API.get(context, image_ref) [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return session.show(context, image_id, [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] _reraise_translated_image_exception(image_id) [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise new_exc.with_traceback(exc_trace) [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] result = getattr(controller, method)(*args, **kwargs) [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self._get(image_id) [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] resp, body = self.http_client.get(url, headers=header) [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self.request(url, 'GET', **kwargs) [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self._handle_response(resp) [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise exc.from_response(resp, resp.content) [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] nova.exception.ImageNotAuthorized: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. [ 1984.924998] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1984.924998] env[61215]: INFO nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Terminating instance [ 1984.925852] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1984.926080] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1984.926710] env[61215]: DEBUG nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1984.926905] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1984.927180] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-78ffd0a8-5605-46b7-8cba-7046aacaf38e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.930040] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480be87f-1d77-4d19-87f8-b10e6835c9cd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.936998] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1984.937232] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0e710f2-dfc9-4a3e-956a-f426ce84ec0d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.939359] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1984.939528] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1984.940493] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8967d3e-fd65-4a71-9af7-7d41a323fd2e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.945174] env[61215]: DEBUG oslo_vmware.api [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Waiting for the task: (returnval){ [ 1984.945174] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b996fa-adf0-0b58-73b8-9d6d3e60a112" [ 1984.945174] env[61215]: _type = "Task" [ 1984.945174] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.952613] env[61215]: DEBUG oslo_vmware.api [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b996fa-adf0-0b58-73b8-9d6d3e60a112, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.095073] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1985.095306] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1985.095494] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Deleting the datastore file [datastore1] 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1985.095756] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c85ebde-471d-4d9d-8887-336217134751 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.102078] env[61215]: DEBUG oslo_vmware.api [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Waiting for the task: (returnval){ [ 1985.102078] env[61215]: value = "task-1690392" [ 1985.102078] env[61215]: _type = "Task" [ 1985.102078] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.109732] env[61215]: DEBUG oslo_vmware.api [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Task: {'id': task-1690392, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.117174] env[61215]: DEBUG oslo_vmware.api [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Task: {'id': task-1690390, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091511} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.117293] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1985.117496] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1985.117688] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1985.117882] env[61215]: INFO nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Took 1.00 seconds to destroy the instance on the hypervisor. [ 1985.120078] env[61215]: DEBUG nova.compute.claims [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1985.120258] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.120488] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.394296] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331e1adc-d17d-405e-9848-7bc4ea69e032 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.403413] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1cfd1b2-be41-482b-81f3-94cf303a49a6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.432865] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aab9625-9b3c-45e7-a753-a0655aba8880 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.440449] env[61215]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ace816-3abb-4c94-852a-02661f5bc61b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.456173] env[61215]: DEBUG nova.compute.provider_tree [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1985.463177] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1985.463421] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Creating directory with path [datastore1] vmware_temp/c5507bd7-4d17-4bb6-b3c9-8d49c3d3c52a/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1985.463760] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6fd1df70-a761-4201-a515-6b4c5bd493d1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.466728] env[61215]: DEBUG nova.scheduler.client.report [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1985.480709] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Created directory with path [datastore1] vmware_temp/c5507bd7-4d17-4bb6-b3c9-8d49c3d3c52a/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1985.480920] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Fetch image to [datastore1] vmware_temp/c5507bd7-4d17-4bb6-b3c9-8d49c3d3c52a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1985.481110] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f 
tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/c5507bd7-4d17-4bb6-b3c9-8d49c3d3c52a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1985.481893] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b5d273-93a7-4bea-9162-a1574e5fa5b0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.485484] env[61215]: DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.365s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.485992] env[61215]: ERROR nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1985.485992] env[61215]: Faults: ['InvalidArgument'] [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Traceback (most recent call last): [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] self.driver.spawn(context, instance, image_meta, [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] self._fetch_image_if_missing(context, vi) [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] image_cache(vi, tmp_image_ds_loc) [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] vm_util.copy_virtual_disk( [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] session._wait_for_task(vmdk_copy_task) [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] return self.wait_for_task(task_ref) [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] return evt.wait() [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] result = hub.switch() [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] return self.greenlet.switch() [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] self.f(*self.args, **self.kw) [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] raise exceptions.translate_fault(task_info.error) [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Faults: ['InvalidArgument'] [ 1985.485992] env[61215]: ERROR nova.compute.manager [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] [ 1985.486713] env[61215]: DEBUG nova.compute.utils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1985.489022] env[61215]: DEBUG nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Build of instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 was re-scheduled: A specified parameter was not correct: fileType [ 1985.489022] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1985.489022] 
env[61215]: DEBUG nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1985.489022] env[61215]: DEBUG nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1985.489204] env[61215]: DEBUG nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1985.489367] env[61215]: DEBUG nova.network.neutron [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1985.494389] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1ddef4-4840-4fa6-9a52-ab6a13deb717 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.504395] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f917be6-1ed4-4556-ad80-b4bfd6d39a39 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.537171] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af40769-6ea9-4809-bbfb-948a851391cb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.543411] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-74b368d2-fbca-4709-9633-54812f4e81e8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.569141] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1985.612472] env[61215]: DEBUG oslo_vmware.api [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Task: {'id': task-1690392, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071156} completed successfully.
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.612743] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1985.612917] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1985.613112] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1985.613299] env[61215]: INFO nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Took 0.69 seconds to destroy the instance on the hypervisor. [ 1985.615408] env[61215]: DEBUG nova.compute.claims [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1985.615632] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.615786] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.623231] env[61215]: DEBUG oslo_vmware.rw_handles [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c5507bd7-4d17-4bb6-b3c9-8d49c3d3c52a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1985.685143] env[61215]: DEBUG oslo_vmware.rw_handles [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1985.685383] env[61215]: DEBUG oslo_vmware.rw_handles [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c5507bd7-4d17-4bb6-b3c9-8d49c3d3c52a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1985.858983] env[61215]: DEBUG nova.network.neutron [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.872234] env[61215]: INFO nova.compute.manager [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Took 0.38 seconds to deallocate network for instance. [ 1985.949050] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a375dc74-834d-4858-871a-9ab9d6664f4f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.956764] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd0bd85-092e-4a3f-a6b9-ad2d29a63004 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.988180] env[61215]: INFO nova.scheduler.client.report [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Deleted allocations for instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 [ 1985.994400] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcc2244-64f2-4cca-abd5-542dfe5697b5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.003103] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66854882-9df4-4c73-9048-f9783f6d4860 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.016793] env[61215]: DEBUG nova.compute.provider_tree [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1986.018728] env[61215]: 
DEBUG oslo_concurrency.lockutils [None req-5c33106f-0ba6-4ad1-bf20-8728788580e7 tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 678.884s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.019863] env[61215]: DEBUG oslo_concurrency.lockutils [None req-48c03b85-d937-406e-a729-a9d9a3ce5f6c tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 479.987s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.020224] env[61215]: DEBUG oslo_concurrency.lockutils [None req-48c03b85-d937-406e-a729-a9d9a3ce5f6c tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Acquiring lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.020441] env[61215]: DEBUG oslo_concurrency.lockutils [None req-48c03b85-d937-406e-a729-a9d9a3ce5f6c tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.020611] env[61215]: DEBUG oslo_concurrency.lockutils [None req-48c03b85-d937-406e-a729-a9d9a3ce5f6c tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.023599] env[61215]: INFO nova.compute.manager [None req-48c03b85-d937-406e-a729-a9d9a3ce5f6c tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Terminating instance [ 1986.025313] env[61215]: DEBUG nova.compute.manager [None req-48c03b85-d937-406e-a729-a9d9a3ce5f6c tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1986.025508] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-48c03b85-d937-406e-a729-a9d9a3ce5f6c tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1986.025748] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d755f19-6779-4ec6-b351-41cafdc34331 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.029303] env[61215]: DEBUG nova.scheduler.client.report [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1986.032112] env[61215]: DEBUG nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1986.040603] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2391aaa6-d462-49de-aed6-d0620f7e1dd4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.051509] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.436s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.052237] env[61215]: ERROR nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. 
[ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Traceback (most recent call last): [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] result = getattr(controller, method)(*args, **kwargs) [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self._get(image_id) [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] resp, body = self.http_client.get(url, headers=header) [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self.request(url, 'GET', **kwargs) [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self._handle_response(resp) [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise exc.from_response(resp, resp.content) [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] During handling of the above exception, another exception occurred: [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Traceback (most recent call last): [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self.driver.spawn(context, instance, image_meta, [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._fetch_image_if_missing(context, vi) [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] image_fetch(context, vi, tmp_image_ds_loc) [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] images.fetch_image( [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] metadata = IMAGE_API.get(context, image_ref) [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1986.052237] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return session.show(context, image_id, [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] _reraise_translated_image_exception(image_id) [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise new_exc.with_traceback(exc_trace) [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 
3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] result = getattr(controller, method)(*args, **kwargs) [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self._get(image_id) [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] resp, body = self.http_client.get(url, headers=header) [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self.request(url, 'GET', **kwargs) [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self._handle_response(resp) [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise exc.from_response(resp, resp.content) [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] nova.exception.ImageNotAuthorized: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. [ 1986.053078] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.053078] env[61215]: DEBUG nova.compute.utils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. 
{{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1986.055670] env[61215]: DEBUG nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Build of instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f was re-scheduled: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 1986.056133] env[61215]: DEBUG nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 1986.056316] env[61215]: DEBUG nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 1986.056473] env[61215]: DEBUG nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1986.056638] env[61215]: DEBUG nova.network.neutron [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1986.070873] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-48c03b85-d937-406e-a729-a9d9a3ce5f6c tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3 could not be found. [ 1986.071079] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-48c03b85-d937-406e-a729-a9d9a3ce5f6c tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1986.071267] env[61215]: INFO nova.compute.manager [None req-48c03b85-d937-406e-a729-a9d9a3ce5f6c tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1986.071513] env[61215]: DEBUG oslo.service.loopingcall [None req-48c03b85-d937-406e-a729-a9d9a3ce5f6c tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1986.073615] env[61215]: DEBUG nova.compute.manager [-] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1986.073712] env[61215]: DEBUG nova.network.neutron [-] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1986.090138] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.090380] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.092356] env[61215]: INFO nova.compute.claims [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1986.101816] env[61215]: DEBUG nova.network.neutron [-] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1986.112812] env[61215]: INFO nova.compute.manager [-] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] Took 0.04 seconds to deallocate network for instance. [ 1986.200695] env[61215]: DEBUG neutronclient.v2_0.client [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61215) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1986.202802] env[61215]: ERROR nova.compute.manager [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Traceback (most recent call last): [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] result = getattr(controller, method)(*args, **kwargs) [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self._get(image_id) [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] resp, body = self.http_client.get(url, headers=header) [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self.request(url, 'GET', **kwargs) [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self._handle_response(resp) [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise exc.from_response(resp, resp.content) [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] During handling of the above exception, another exception occurred: [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Traceback (most recent call last): [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self.driver.spawn(context, instance, image_meta, [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._fetch_image_if_missing(context, vi) [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] image_fetch(context, vi, tmp_image_ds_loc) [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] images.fetch_image( [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] metadata = IMAGE_API.get(context, image_ref) [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1986.202802] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return session.show(context, image_id, [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] _reraise_translated_image_exception(image_id) [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise new_exc.with_traceback(exc_trace) [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 
3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] result = getattr(controller, method)(*args, **kwargs) [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self._get(image_id) [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] resp, body = self.http_client.get(url, headers=header) [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self.request(url, 'GET', **kwargs) [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self._handle_response(resp) [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise exc.from_response(resp, resp.content) [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] nova.exception.ImageNotAuthorized: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. 
[ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] During handling of the above exception, another exception occurred: [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Traceback (most recent call last): [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._build_and_run_instance(context, instance, image, [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise exception.RescheduledException( [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] nova.exception.RescheduledException: Build of instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f was re-scheduled: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] During handling of the above exception, another exception occurred: [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Traceback (most recent call last): [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] ret = obj(*args, **kwargs) [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] exception_handler_v20(status_code, error_body) [ 1986.203665] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise client_exc(message=error_message, [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Neutron server returns request_ids: ['req-3678720c-89a2-4ff2-9d1c-ff900927cf27'] [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 
3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] During handling of the above exception, another exception occurred: [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Traceback (most recent call last): [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._deallocate_network(context, instance, requested_networks) [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self.network_api.deallocate_for_instance( [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] data = neutron.list_ports(**search_opts) [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] ret = obj(*args, **kwargs) [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self.list('ports', self.ports_path, retrieve_all, [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] ret = obj(*args, **kwargs) [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] for r in self._pagination(collection, path, **params): [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] res = self.get(path, params=params) [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] ret = obj(*args, **kwargs) [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 
3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self.retry_request("GET", action, body=body, [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] ret = obj(*args, **kwargs) [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self.do_request(method, action, body=body, [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] ret = obj(*args, **kwargs) [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._handle_fault_response(status_code, replybody, resp) [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise exception.Unauthorized() [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] nova.exception.Unauthorized: Not authorized. [ 1986.204725] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.206664] env[61215]: DEBUG oslo_concurrency.lockutils [None req-48c03b85-d937-406e-a729-a9d9a3ce5f6c tempest-ServerMetadataTestJSON-1553062290 tempest-ServerMetadataTestJSON-1553062290-project-member] Lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.186s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.206664] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 153.240s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.206664] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3] During sync_power_state the instance has a pending task (deleting). Skip.
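
The two stacked tracebacks above follow the same translation pattern: Nova catches a low-level client error (glanceclient's HTTP 401, then neutronclient's 401 during cleanup) and re-raises a Nova-level exception on the original traceback, which is why each block is joined by "During handling of the above exception, another exception occurred". Below is a minimal, self-contained sketch of that pattern; the class and function names are illustrative stand-ins, not Nova's actual code.

import sys
import traceback

class HTTPUnauthorized(Exception):
    """Stand-in for glanceclient.exc.HTTPUnauthorized (illustrative)."""

class ImageNotAuthorized(Exception):
    """Stand-in for nova.exception.ImageNotAuthorized (illustrative)."""
    def __init__(self, image_id):
        super().__init__("Not authorized for image %s." % image_id)

def glance_get(image_id):
    # Pretend the image service rejected our token with a 401.
    raise HTTPUnauthorized("HTTP 401 Unauthorized")

def show(image_id):
    try:
        return glance_get(image_id)
    except Exception:
        # Re-raise the translated exception on the original traceback,
        # so the log carries both the client stack and the Nova stack.
        exc_trace = sys.exc_info()[2]
        raise ImageNotAuthorized(image_id).with_traceback(exc_trace)

if __name__ == "__main__":
    try:
        show("e91f0c25-9ff9-4937-8440-f47cfb2028bc")
    except ImageNotAuthorized:
        traceback.print_exc()  # prints both chained tracebacks

Because raising inside an except block chains implicitly via __context__, the printed output reproduces exactly the two-part structure seen in the records above.
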
[ 1986.206910] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "fb1f5eae-e1b9-4eed-9c6c-70f0e365a3a3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.257272] env[61215]: INFO nova.scheduler.client.report [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Deleted allocations for instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f [ 1986.276200] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0925c5b1-8752-4eb8-95c1-f8ceb2c0dd34 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 582.923s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.277287] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 386.804s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.277513] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Acquiring lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.277718] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.277887] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.280087] env[61215]: INFO nova.compute.manager [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Terminating instance [ 1986.281852] env[61215]: DEBUG nova.compute.manager [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member]
[instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 1986.282288] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1986.282737] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b23068f-ec55-4f93-a3a2-26abafbb8744 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.291275] env[61215]: DEBUG nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 1986.298175] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b89711-78e2-452a-a479-e3c244f4065b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.329444] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f could not be found. [ 1986.329584] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1986.329784] env[61215]: INFO nova.compute.manager [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1986.330064] env[61215]: DEBUG oslo.service.loopingcall [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1986.334558] env[61215]: DEBUG nova.compute.manager [-] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 1986.334678] env[61215]: DEBUG nova.network.neutron [-] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1986.349025] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.413661] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb06eda8-a164-41bc-9766-9ecc00d77ccd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.421348] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68378a9a-ac8a-4859-9493-9e4e210ac7e2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.452214] env[61215]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61215) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1986.452468] env[61215]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify the Neutron admin credentials located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1986.453176] env[61215]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-3cac0a95-1d18-4db5-b18d-4927696aaa9e'] [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1986.453176] env[61215]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1986.453176] env[61215]: ERROR oslo.service.loopingcall [ 1986.454341] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d9be07-c292-4843-9968-9959c26d45d1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.456892] env[61215]: ERROR nova.compute.manager [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
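
The "Dynamic interval looping call ... failed" ERROR above is the retry wrapper giving up: _deallocate_network_with_retries re-invokes the Neutron call several times, and when the final attempt still raises, the exception escapes the looping call and the terminate path marks the instance ERROR. Here is a rough plain-Python sketch of that retry shape, under the assumption of a fixed attempt budget and a growing sleep interval; the real policy lives in oslo.service's RetryDecorator and may differ.

import time

class Unauthorized(Exception):
    """Stand-in for neutronclient's 401 exception (illustrative)."""

def deallocate_network(instance_uuid):
    # Pretend every call to Neutron is rejected because the service
    # token is invalid, as in the records above.
    raise Unauthorized("401: The request you have made requires authentication.")

def deallocate_with_retries(instance_uuid, attempts=3, base_delay=0.1):
    # Dynamic-interval retry: sleep a little longer after each failure,
    # then let the last exception escape to the caller, which is what
    # ultimately flips the instance's vm_state to ERROR in the log.
    for attempt in range(1, attempts + 1):
        try:
            return deallocate_network(instance_uuid)
        except Unauthorized:
            if attempt == attempts:
                raise
            time.sleep(base_delay * attempt)

if __name__ == "__main__":
    try:
        deallocate_with_retries("3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f")
    except Unauthorized as exc:
        print("giving up, instance goes to ERROR:", exc)

Retrying is the right default for transient network faults, but as the log shows it cannot help here: a 401 from an expired or misconfigured credential fails identically on every attempt.
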
[ 1986.465619] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf28e6b2-4cbf-4586-b167-65fcedebbd57 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.485651] env[61215]: DEBUG nova.compute.provider_tree [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1986.496598] env[61215]: ERROR nova.compute.manager [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Traceback (most recent call last): [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] ret = obj(*args, **kwargs) [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] exception_handler_v20(status_code, error_body) [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise client_exc(message=error_message, [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Neutron server returns request_ids: ['req-3cac0a95-1d18-4db5-b18d-4927696aaa9e'] [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] During handling of the above exception, another exception occurred: [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Traceback (most recent call last): [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._delete_instance(context, instance, bdms) [ 
1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._shutdown_instance(context, instance, bdms) [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._try_deallocate_network(context, instance, requested_networks) [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] with excutils.save_and_reraise_exception(): [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self.force_reraise() [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise self.value [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] _deallocate_network_with_retries() [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return evt.wait() [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] result = hub.switch() [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self.greenlet.switch() [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] result = func(*self.args, **self.kw) [ 1986.496598] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in 
_func [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] result = f(*args, **kwargs) [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._deallocate_network( [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self.network_api.deallocate_for_instance( [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] data = neutron.list_ports(**search_opts) [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] ret = obj(*args, **kwargs) [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self.list('ports', self.ports_path, retrieve_all, [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] ret = obj(*args, **kwargs) [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] for r in self._pagination(collection, path, **params): [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] res = self.get(path, params=params) [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] ret = obj(*args, **kwargs) [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self.retry_request("GET", action, body=body, [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 
3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] ret = obj(*args, **kwargs) [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] return self.do_request(method, action, body=body, [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] ret = obj(*args, **kwargs) [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] self._handle_fault_response(status_code, replybody, resp) [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1986.497521] env[61215]: ERROR nova.compute.manager [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] [ 1986.499186] env[61215]: DEBUG nova.scheduler.client.report [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1986.517364] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.427s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.517870] env[61215]: DEBUG nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1986.520438] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.172s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.521846] env[61215]: INFO nova.compute.claims [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1986.525775] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.249s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.526902] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 153.560s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.527150] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] During sync_power_state the instance has a pending task (deleting). Skip. [ 1986.527371] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.554579] env[61215]: DEBUG nova.compute.utils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1986.555889] env[61215]: DEBUG nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Not allocating networking since 'none' was specified. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1986.564190] env[61215]: DEBUG nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Start building block device mappings for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1986.616266] env[61215]: INFO nova.compute.manager [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] [instance: 3f39741c-19a9-4dc2-8f0c-14a2bbaa7d6f] Successfully reverted task state from None on failure for instance. [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server [None req-e0a95887-8043-4c33-bca7-5bfb81d4ca65 tempest-DeleteServersAdminTestJSON-641029649 tempest-DeleteServersAdminTestJSON-641029649-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-3cac0a95-1d18-4db5-b18d-4927696aaa9e'] [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
227, in __exit__ [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 1986.620604] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 
1986.621988] env[61215]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1986.621988] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1986.623291] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1986.623291] env[61215]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1986.623291] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1986.623291] env[61215]: ERROR oslo_messaging.rpc.server raise 
exception.NeutronAdminCredentialConfigurationInvalid() [ 1986.623291] env[61215]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1986.623291] env[61215]: ERROR oslo_messaging.rpc.server [ 1986.639031] env[61215]: DEBUG nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1986.664043] env[61215]: DEBUG nova.virt.hardware [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=<?>,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-01T03:04:46Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1986.664309] env[61215]: DEBUG nova.virt.hardware [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1986.664473] env[61215]: DEBUG nova.virt.hardware [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1986.664697] env[61215]: DEBUG nova.virt.hardware [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1986.664855] env[61215]: DEBUG nova.virt.hardware [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1986.665015] env[61215]: DEBUG nova.virt.hardware [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1986.665238] env[61215]: DEBUG nova.virt.hardware [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1986.665402] env[61215]: DEBUG nova.virt.hardware [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1986.665570] env[61215]: DEBUG nova.virt.hardware [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1986.666070] env[61215]: DEBUG nova.virt.hardware [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1986.666070] env[61215]: DEBUG nova.virt.hardware [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1986.666840] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a767a2dd-518b-481c-a0b1-b4d420d18080 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.675828] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c89c975-69d1-4ce3-ba6e-0cd13246dbf3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.691443] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Instance VIF info [] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1986.696938] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Creating folder: Project (278d8035dc0b4ff29bdac68dffb74102). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1986.697238] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d5e8925-0a09-463b-812b-a2ee7fb7b625 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.706444] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Created folder: Project (278d8035dc0b4ff29bdac68dffb74102) in parent group-v352463. [ 1986.706631] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Creating folder: Instances. 
Parent ref: group-v352542. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1986.707151] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8fa543d3-6dac-43e2-a1eb-f6c095d2227b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.716025] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Created folder: Instances in parent group-v352542. [ 1986.716294] env[61215]: DEBUG oslo.service.loopingcall [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1986.716488] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1986.716684] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32fcc6e5-a826-4bfe-815c-6f5ecfe6dc50 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.736051] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1986.736051] env[61215]: value = "task-1690395" [ 1986.736051] env[61215]: _type = "Task" [ 1986.736051] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.743398] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690395, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.812604] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beba8729-7573-467c-b532-6ca6928f1db3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.820044] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a43d65e-c0bf-490a-8bff-36edf3846274 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.851250] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7c1b09-a12f-47f8-9470-05b68152c0b0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.858578] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb467a4-b1d3-4f17-bdff-4ccec375067e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.871411] env[61215]: DEBUG nova.compute.provider_tree [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1986.880073] env[61215]: DEBUG nova.scheduler.client.report [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1986.896650] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.376s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.897290] env[61215]: DEBUG nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 1986.930064] env[61215]: DEBUG nova.compute.utils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1986.931602] env[61215]: DEBUG nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Not allocating networking since 'none' was specified. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1969}} [ 1986.940896] env[61215]: DEBUG nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 1987.014368] env[61215]: DEBUG nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 1987.034501] env[61215]: DEBUG nova.virt.hardware [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=<?>,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-01T03:04:46Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1987.034753] env[61215]: DEBUG nova.virt.hardware [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1987.034916] env[61215]: DEBUG nova.virt.hardware [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1987.035119] env[61215]: DEBUG nova.virt.hardware [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1987.035274] env[61215]: DEBUG nova.virt.hardware [None 
req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1987.035424] env[61215]: DEBUG nova.virt.hardware [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1987.035633] env[61215]: DEBUG nova.virt.hardware [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1987.035795] env[61215]: DEBUG nova.virt.hardware [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1987.035963] env[61215]: DEBUG nova.virt.hardware [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1987.036147] env[61215]: DEBUG nova.virt.hardware [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1987.036332] env[61215]: DEBUG nova.virt.hardware [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1987.037396] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309ad618-b80f-417e-a564-1d98c5bb1662 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.044932] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ce3a4d-b36d-4443-bb05-2f195f75791c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.057924] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Instance VIF info [] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1987.063416] env[61215]: DEBUG oslo.service.loopingcall [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1987.063624] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1987.063820] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4622c521-0812-4c4a-8a96-e95612f6705a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.080261] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1987.080261] env[61215]: value = "task-1690396" [ 1987.080261] env[61215]: _type = "Task" [ 1987.080261] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.087318] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690396, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.246234] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690395, 'name': CreateVM_Task, 'duration_secs': 0.2516} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.246548] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1987.247027] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1987.247205] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1987.247602] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1987.247894] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3942385-6d35-490d-9176-928ef5ef66c4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.252740] env[61215]: DEBUG oslo_vmware.api [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for the task: (returnval){ [ 1987.252740] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5249e163-fdfe-b178-58ba-c8bc4283291c" [ 1987.252740] env[61215]: _type = "Task" [ 1987.252740] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.261855] env[61215]: DEBUG oslo_vmware.api [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5249e163-fdfe-b178-58ba-c8bc4283291c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.590961] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690396, 'name': CreateVM_Task, 'duration_secs': 0.2473} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.590961] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1987.591299] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1987.763161] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1987.763454] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1987.763733] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1987.764015] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1987.764383] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1987.764673] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-29d576bc-a51c-4e93-a84c-b12f71389f98 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.769465] env[61215]: DEBUG oslo_vmware.api [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for the task: (returnval){ [ 1987.769465] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]528f9faa-38b4-249a-10a1-26fe62ff40e8" [ 1987.769465] env[61215]: _type = "Task" [ 1987.769465] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.778758] env[61215]: DEBUG oslo_vmware.api [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]528f9faa-38b4-249a-10a1-26fe62ff40e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.279877] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1988.280160] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1988.280421] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1993.262111] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e8f5f49a-d479-41f1-86b3-d6c8369076c5 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Acquiring lock "0d609df2-621c-456f-b8ce-a209e9052153" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.125722] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Acquiring lock "455e7272-f099-496f-b929-ed6fa9a0ab44" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.126009] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Lock "455e7272-f099-496f-b929-ed6fa9a0ab44" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2034.230165] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2034.472636] env[61215]: WARNING oslo_vmware.rw_handles [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2034.472636] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2034.472636] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2034.472636] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2034.472636] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2034.472636] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2034.472636] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2034.472636] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2034.472636] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2034.472636] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2034.472636] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2034.472636] env[61215]: ERROR oslo_vmware.rw_handles [ 2034.472990] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/c5507bd7-4d17-4bb6-b3c9-8d49c3d3c52a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2034.475129] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2034.475386] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Copying Virtual Disk [datastore1] vmware_temp/c5507bd7-4d17-4bb6-b3c9-8d49c3d3c52a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/c5507bd7-4d17-4bb6-b3c9-8d49c3d3c52a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk 
{{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2034.475681] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38ae823e-016a-40da-acba-127d25ddacbc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.484219] env[61215]: DEBUG oslo_vmware.api [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Waiting for the task: (returnval){ [ 2034.484219] env[61215]: value = "task-1690397" [ 2034.484219] env[61215]: _type = "Task" [ 2034.484219] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.491842] env[61215]: DEBUG oslo_vmware.api [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Task: {'id': task-1690397, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.995898] env[61215]: DEBUG oslo_vmware.exceptions [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2034.996198] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2034.996758] env[61215]: ERROR nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2034.996758] env[61215]: Faults: ['InvalidArgument'] [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Traceback (most recent call last): [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] yield resources [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] self.driver.spawn(context, instance, image_meta, [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2034.996758] 
env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] self._fetch_image_if_missing(context, vi) [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] image_cache(vi, tmp_image_ds_loc) [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] vm_util.copy_virtual_disk( [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] session._wait_for_task(vmdk_copy_task) [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] return self.wait_for_task(task_ref) [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] return evt.wait() [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] result = hub.switch() [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] return self.greenlet.switch() [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] self.f(*self.args, **self.kw) [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] raise exceptions.translate_fault(task_info.error) [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 
8d4665c7-67de-4ab3-a8b7-596a5e1152ce] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Faults: ['InvalidArgument'] [ 2034.996758] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] [ 2034.997688] env[61215]: INFO nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Terminating instance [ 2034.998655] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2034.998880] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2034.999151] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-255a800b-e2bf-4247-ac11-674e0df142fe {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.001314] env[61215]: DEBUG nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2035.001512] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2035.002239] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26eb8a4-4681-4589-866d-1dd4cce3ded3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.009186] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2035.009394] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b152ea39-8850-4f4c-a8df-7429b25bc7cf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.011513] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2035.011694] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2035.012651] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d9d33e8-8f95-48e0-bf82-ac562189bdeb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.017297] env[61215]: DEBUG oslo_vmware.api [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 2035.017297] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5261648e-c609-f74f-d218-ad6f109d9192" [ 2035.017297] env[61215]: _type = "Task" [ 2035.017297] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.024436] env[61215]: DEBUG oslo_vmware.api [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5261648e-c609-f74f-d218-ad6f109d9192, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.083064] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2035.083064] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2035.083263] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Deleting the datastore file [datastore1] 8d4665c7-67de-4ab3-a8b7-596a5e1152ce {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2035.083478] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4a0bbba-0e9c-44c3-9080-4eb28f4fa3bf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.089875] env[61215]: DEBUG oslo_vmware.api [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Waiting for the task: (returnval){ [ 2035.089875] env[61215]: value = "task-1690399" [ 2035.089875] env[61215]: _type = "Task" [ 2035.089875] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.097372] env[61215]: DEBUG oslo_vmware.api [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Task: {'id': task-1690399, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.528146] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2035.528456] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating directory with path [datastore1] vmware_temp/e8ef57e8-cfa1-4172-ac5e-ddd8b912d4da/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2035.528639] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73bf9e34-f50e-4bde-87ce-3e34de54fb7c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.543487] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Created directory with path [datastore1] vmware_temp/e8ef57e8-cfa1-4172-ac5e-ddd8b912d4da/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2035.543860] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Fetch image to [datastore1] vmware_temp/e8ef57e8-cfa1-4172-ac5e-ddd8b912d4da/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2035.543986] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/e8ef57e8-cfa1-4172-ac5e-ddd8b912d4da/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2035.544647] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fceb9345-f12a-45db-9a81-954f95a58523 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.551012] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8138a8-3195-4d78-bb17-b22136a3658a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.560129] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd72554-551c-4b80-9680-b2bde66e6525 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.590059] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69618fd1-a1fd-4eb4-9822-c6215e4956ec 
{{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.601923] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3d36612b-b2fb-4493-acbe-8795751a4ff8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.603663] env[61215]: DEBUG oslo_vmware.api [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Task: {'id': task-1690399, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074118} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.603897] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2035.604090] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2035.604263] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2035.604434] env[61215]: INFO nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Took 0.60 seconds to destroy the instance on the hypervisor. 
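The records above show the full vCenter task lifecycle as Nova sees it: CopyVirtualDisk_Task is submitted, polled ("progress is 0%"), and ends in VimFaultException ("A specified parameter was not correct: fileType"), while the later DeleteDatastoreFile_Task completes in ~0.074s. A minimal, self-contained Python sketch of that poll-and-translate pattern (illustrative only, not the oslo.vmware source; TaskInfo, VimFault and wait_for_task below are hypothetical stand-ins):

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    # Hypothetical stand-in for the vSphere TaskInfo a real client would
    # fetch via the PropertyCollector.
    state: str = "running"          # queued | running | success | error
    progress: int = 0
    error: str | None = None

class VimFault(Exception):
    # Illustrative analogue of oslo_vmware.exceptions.VimFaultException.
    def __init__(self, msg: str, faults: list[str]):
        super().__init__(msg)
        self.faults = faults

def wait_for_task(read_task_info, poll_interval: float = 0.5) -> TaskInfo:
    # Poll until the task leaves queued/running, like the
    # "Task: {'id': task-1690397, ...} progress is 0%" records above.
    while True:
        info = read_task_info()
        if info.state in ("queued", "running"):
            print(f"progress is {info.progress}%")
            time.sleep(poll_interval)
            continue
        if info.state == "success":
            return info
        # On error the server-side fault is translated into a client-side
        # exception; this is the step that turns the failed disk copy into
        # the "Faults: ['InvalidArgument']" traceback above.
        raise VimFault(info.error or "task failed", faults=["InvalidArgument"])

In the log, that translated exception propagates from _poll_task through session._wait_for_task into vmops._fetch_image_if_missing, which is why the spawn traceback bottoms out at raise exceptions.translate_fault(task_info.error).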
[ 2035.606510] env[61215]: DEBUG nova.compute.claims [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2035.606700] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.606916] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2035.626116] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2035.679900] env[61215]: DEBUG oslo_vmware.rw_handles [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e8ef57e8-cfa1-4172-ac5e-ddd8b912d4da/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2035.741689] env[61215]: DEBUG oslo_vmware.rw_handles [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2035.741900] env[61215]: DEBUG oslo_vmware.rw_handles [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e8ef57e8-cfa1-4172-ac5e-ddd8b912d4da/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2035.920885] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9db639-50bd-491c-8c7f-49c49eb909e5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.928402] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5672c22-81a6-425f-9a8b-9f21be7c71ef {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.958306] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cfe220-7f61-4ca0-9e8f-3072f01c9da2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.965730] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29eec44b-dc3d-400b-b2d9-837b30e25802 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.978636] env[61215]: DEBUG nova.compute.provider_tree [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2035.986759] env[61215]: DEBUG nova.scheduler.client.report [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2037.886911] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.280s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.887497] env[61215]: ERROR nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2037.887497] env[61215]: Faults: ['InvalidArgument'] [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Traceback (most recent call last): [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 
8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] self.driver.spawn(context, instance, image_meta, [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] self._fetch_image_if_missing(context, vi) [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] image_cache(vi, tmp_image_ds_loc) [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] vm_util.copy_virtual_disk( [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] session._wait_for_task(vmdk_copy_task) [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] return self.wait_for_task(task_ref) [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] return evt.wait() [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] result = hub.switch() [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] return self.greenlet.switch() [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] 
self.f(*self.args, **self.kw) [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] raise exceptions.translate_fault(task_info.error) [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Faults: ['InvalidArgument'] [ 2037.887497] env[61215]: ERROR nova.compute.manager [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] [ 2037.888330] env[61215]: DEBUG nova.compute.utils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2037.890096] env[61215]: DEBUG nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Build of instance 8d4665c7-67de-4ab3-a8b7-596a5e1152ce was re-scheduled: A specified parameter was not correct: fileType [ 2037.890096] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2037.890507] env[61215]: DEBUG nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2037.890702] env[61215]: DEBUG nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2037.890895] env[61215]: DEBUG nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2037.891077] env[61215]: DEBUG nova.network.neutron [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2038.337312] env[61215]: DEBUG nova.network.neutron [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2038.350392] env[61215]: INFO nova.compute.manager [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Took 0.46 seconds to deallocate network for instance. [ 2038.457795] env[61215]: INFO nova.scheduler.client.report [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Deleted allocations for instance 8d4665c7-67de-4ab3-a8b7-596a5e1152ce [ 2038.477771] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2c3836ac-dcb9-454b-bd34-c560db9d600f tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 583.369s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.478860] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3e71cd7a-4acb-41f0-b94e-c60ace9c155c tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 387.149s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.479101] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3e71cd7a-4acb-41f0-b94e-c60ace9c155c tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Acquiring lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.479312] env[61215]: DEBUG oslo_concurrency.lockutils [None 
req-3e71cd7a-4acb-41f0-b94e-c60ace9c155c tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.479479] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3e71cd7a-4acb-41f0-b94e-c60ace9c155c tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.481491] env[61215]: INFO nova.compute.manager [None req-3e71cd7a-4acb-41f0-b94e-c60ace9c155c tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Terminating instance [ 2038.483202] env[61215]: DEBUG nova.compute.manager [None req-3e71cd7a-4acb-41f0-b94e-c60ace9c155c tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2038.483391] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3e71cd7a-4acb-41f0-b94e-c60ace9c155c tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2038.483903] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-020501bd-80e0-4128-bcce-407e866675d4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.492976] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b0493e-fe05-4e94-a59a-5c8fd1872eed {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.503882] env[61215]: DEBUG nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2038.523341] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-3e71cd7a-4acb-41f0-b94e-c60ace9c155c tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8d4665c7-67de-4ab3-a8b7-596a5e1152ce could not be found. 
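Every lockutils record above carries two timings: how long the caller waited to acquire the named lock, and how long it held it. The build lock on 8d4665c7-67de-4ab3-a8b7-596a5e1152ce was held 583.369s, which is exactly why do_terminate_instance reports waiting 387.149s for the same name. A self-contained sketch of that instrumentation pattern (illustrative; not the oslo.concurrency implementation, and the module-level lock registry is a simplification):

import functools
import threading
import time

_locks: dict[str, threading.Lock] = {}

def synchronized(name: str):
    # Serialize callers on a named in-process lock and log waited/held
    # durations, mimicking the "acquired ... :: waited" and
    # '"released" ... :: held' lines above.
    lock = _locks.setdefault(name, threading.Lock())

    def decorator(fn):
        @functools.wraps(fn)
        def inner(*args, **kwargs):
            t0 = time.monotonic()
            with lock:
                waited = time.monotonic() - t0
                print(f'Lock "{name}" acquired by "{fn.__qualname__}" :: waited {waited:.3f}s')
                t1 = time.monotonic()
                try:
                    return fn(*args, **kwargs)
                finally:
                    held = time.monotonic() - t1
                    print(f'Lock "{name}" "released" by "{fn.__qualname__}" :: held {held:.3f}s')
        return inner
    return decorator

@synchronized("compute_resources")
def abort_instance_claim():
    time.sleep(0.1)   # stand-in for the resource-tracker work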
[ 2038.523542] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3e71cd7a-4acb-41f0-b94e-c60ace9c155c tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2038.523748] env[61215]: INFO nova.compute.manager [None req-3e71cd7a-4acb-41f0-b94e-c60ace9c155c tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2038.523992] env[61215]: DEBUG oslo.service.loopingcall [None req-3e71cd7a-4acb-41f0-b94e-c60ace9c155c tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2038.524234] env[61215]: DEBUG nova.compute.manager [-] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2038.524334] env[61215]: DEBUG nova.network.neutron [-] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2038.548606] env[61215]: DEBUG nova.network.neutron [-] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2038.552700] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2038.552951] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.554360] env[61215]: INFO nova.compute.claims [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2038.557425] env[61215]: INFO nova.compute.manager [-] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] Took 0.03 seconds to deallocate network for instance. 
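The inventory reported for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 determines whether the claim above can succeed: Placement treats the schedulable capacity of each resource class as (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick check with the exact numbers from these report lines (the capacity() helper is ours, not a Nova API):

INVENTORY = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 173},
}

def capacity(inv: dict) -> dict:
    # Effective schedulable capacity per resource class:
    # (total - reserved) * allocation_ratio; max_unit bounds each request.
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(capacity(INVENTORY))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
# The m1.nano flavor being claimed (1 vCPU, 128 MB, 1 GB root, per the
# flavor dump later in this log) trivially fits, so the claim on node
# domain-c8 succeeds and the lock is released 0.316s later.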
[ 2038.646035] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3e71cd7a-4acb-41f0-b94e-c60ace9c155c tempest-FloatingIPsAssociationNegativeTestJSON-1466971338 tempest-FloatingIPsAssociationNegativeTestJSON-1466971338-project-member] Lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.167s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.646934] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 205.680s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.647127] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 8d4665c7-67de-4ab3-a8b7-596a5e1152ce] During sync_power_state the instance has a pending task (deleting). Skip. [ 2038.647307] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "8d4665c7-67de-4ab3-a8b7-596a5e1152ce" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.653556] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2038.653779] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2038.788440] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ba7f74-4b3f-49d9-b72c-98c83fafd415 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.796382] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fa24da-ff6f-4f83-b573-aa0ce4900ce3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.828083] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d4631b-5742-4e7d-bcb3-639d01b20793 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.834733] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b132e588-252f-473f-ba00-f00abf017222 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.847312] env[61215]: DEBUG nova.compute.provider_tree [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2038.855881] env[61215]: DEBUG nova.scheduler.client.report [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2038.868902] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.316s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.869366] env[61215]: DEBUG nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2038.904434] env[61215]: DEBUG nova.compute.utils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2038.906648] env[61215]: DEBUG nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2038.906842] env[61215]: DEBUG nova.network.neutron [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2038.919835] env[61215]: DEBUG nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Start building block device mappings for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2038.992256] env[61215]: DEBUG nova.policy [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27ff5932b5b64df087457974b83bba92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '919d66c0b792490694750f6760a90114', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2038.995502] env[61215]: DEBUG nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2039.022555] env[61215]: DEBUG nova.virt.hardware [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2039.022810] env[61215]: DEBUG nova.virt.hardware [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2039.022971] env[61215]: DEBUG nova.virt.hardware [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2039.023178] env[61215]: DEBUG nova.virt.hardware [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2039.023330] env[61215]: DEBUG nova.virt.hardware [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2039.023522] env[61215]: DEBUG nova.virt.hardware [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 
tempest-ImagesTestJSON-1438693841-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2039.023711] env[61215]: DEBUG nova.virt.hardware [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2039.023871] env[61215]: DEBUG nova.virt.hardware [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2039.024051] env[61215]: DEBUG nova.virt.hardware [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2039.024225] env[61215]: DEBUG nova.virt.hardware [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2039.024401] env[61215]: DEBUG nova.virt.hardware [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2039.025277] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5689df05-fdcf-4b1e-afe9-491c6f6b2138 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.032907] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e674ecf7-452f-4a72-96f9-079812a4f3cf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.626114] env[61215]: DEBUG nova.network.neutron [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Successfully created port: 39679140-066a-414b-883d-9f820b49e45e {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2040.361360] env[61215]: DEBUG nova.network.neutron [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Successfully updated port: 39679140-066a-414b-883d-9f820b49e45e {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2040.385585] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "refresh_cache-f3a3a510-a085-4388-b49d-b4371095b436" {{(pid=61215) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2040.386945] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquired lock "refresh_cache-f3a3a510-a085-4388-b49d-b4371095b436" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2040.387126] env[61215]: DEBUG nova.network.neutron [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2040.438853] env[61215]: DEBUG nova.compute.manager [req-3724a5e3-ddd3-4adf-831d-21852fa9cb4d req-951db61e-60fb-4230-ab6d-daa0a697a06e service nova] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Received event network-vif-plugged-39679140-066a-414b-883d-9f820b49e45e {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2040.439088] env[61215]: DEBUG oslo_concurrency.lockutils [req-3724a5e3-ddd3-4adf-831d-21852fa9cb4d req-951db61e-60fb-4230-ab6d-daa0a697a06e service nova] Acquiring lock "f3a3a510-a085-4388-b49d-b4371095b436-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.439304] env[61215]: DEBUG oslo_concurrency.lockutils [req-3724a5e3-ddd3-4adf-831d-21852fa9cb4d req-951db61e-60fb-4230-ab6d-daa0a697a06e service nova] Lock "f3a3a510-a085-4388-b49d-b4371095b436-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.439472] env[61215]: DEBUG oslo_concurrency.lockutils [req-3724a5e3-ddd3-4adf-831d-21852fa9cb4d req-951db61e-60fb-4230-ab6d-daa0a697a06e service nova] Lock "f3a3a510-a085-4388-b49d-b4371095b436-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.439640] env[61215]: DEBUG nova.compute.manager [req-3724a5e3-ddd3-4adf-831d-21852fa9cb4d req-951db61e-60fb-4230-ab6d-daa0a697a06e service nova] [instance: f3a3a510-a085-4388-b49d-b4371095b436] No waiting events found dispatching network-vif-plugged-39679140-066a-414b-883d-9f820b49e45e {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2040.439808] env[61215]: WARNING nova.compute.manager [req-3724a5e3-ddd3-4adf-831d-21852fa9cb4d req-951db61e-60fb-4230-ab6d-daa0a697a06e service nova] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Received unexpected event network-vif-plugged-39679140-066a-414b-883d-9f820b49e45e for instance with vm_state building and task_state spawning. [ 2040.446499] env[61215]: DEBUG nova.network.neutron [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2040.654280] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2040.654280] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2040.725859] env[61215]: DEBUG nova.network.neutron [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Updating instance_info_cache with network_info: [{"id": "39679140-066a-414b-883d-9f820b49e45e", "address": "fa:16:3e:96:97:3f", "network": {"id": "33e707ae-ffbe-4208-a3b4-6f45d3a65a85", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2079632580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "919d66c0b792490694750f6760a90114", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39679140-06", "ovs_interfaceid": "39679140-066a-414b-883d-9f820b49e45e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.740562] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Releasing lock "refresh_cache-f3a3a510-a085-4388-b49d-b4371095b436" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.740857] env[61215]: DEBUG nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Instance network_info: |[{"id": "39679140-066a-414b-883d-9f820b49e45e", "address": "fa:16:3e:96:97:3f", "network": {"id": "33e707ae-ffbe-4208-a3b4-6f45d3a65a85", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2079632580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "919d66c0b792490694750f6760a90114", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39679140-06", "ovs_interfaceid": "39679140-066a-414b-883d-9f820b49e45e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2040.741309] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:97:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1c797172-a569-458e-aeb0-3f21e589a740', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39679140-066a-414b-883d-9f820b49e45e', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2040.749916] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Creating folder: Project (919d66c0b792490694750f6760a90114). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2040.750515] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4704899-315a-4ee5-976c-232a4e6926c9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.763252] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Created folder: Project (919d66c0b792490694750f6760a90114) in parent group-v352463. [ 2040.763479] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Creating folder: Instances. Parent ref: group-v352546. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2040.763715] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5c255a5-4ebf-4137-8043-88d76753f2e7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.773580] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Created folder: Instances in parent group-v352546. [ 2040.773807] env[61215]: DEBUG oslo.service.loopingcall [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2040.773991] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2040.774207] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e8be423d-28ee-4c37-85df-30bee1cb681e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.793920] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2040.793920] env[61215]: value = "task-1690402" [ 2040.793920] env[61215]: _type = "Task" [ 2040.793920] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.801933] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690402, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.303948] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690402, 'name': CreateVM_Task, 'duration_secs': 0.291087} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.304133] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2041.304837] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2041.305015] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2041.305338] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2041.305581] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a153e5e-7ff1-4751-9cfb-f24441de10a2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.310346] env[61215]: DEBUG oslo_vmware.api [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for the task: (returnval){ [ 2041.310346] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d97961-c3c5-59d5-3279-260f831a8d05" [ 2041.310346] env[61215]: _type = "Task" [ 2041.310346] env[61215]: } to complete. 
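The CreateVM_Task records above show the poll-until-done pattern behind oslo.vmware's wait_for_task: invoke the SOAP method, get a task handle back, and poll its state until vCenter reports success or a fault. A minimal sketch of that loop (get_task_info and the returned dict shape are hypothetical stand-ins, not the oslo.vmware API):

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_id, interval=0.5):
        # Poll the task until it reaches a terminal state, as the
        # "progress is 0%" / "completed successfully" records show.
        while True:
            info = get_task_info(task_id)  # e.g. {"state": "running", "progress": 0}
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise TaskFailed(info.get("error", "unknown fault"))
            time.sleep(interval)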
[ 2041.322752] env[61215]: DEBUG oslo_vmware.api [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d97961-c3c5-59d5-3279-260f831a8d05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2041.654289] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2041.654650] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}}
[ 2041.654650] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}}
[ 2041.676754] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 2041.676922] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 2041.677067] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 2041.677200] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 2041.677325] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 2041.677447] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 2041.677567] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 2041.677689] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 2041.677808] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 2041.677926] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}}
[ 2041.678054] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}}
[ 2041.678525] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2041.678668] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}}
[ 2041.678823] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2041.689775] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2041.689979] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2041.690157] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2041.690309] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2041.691374] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd35220-2e16-4f2f-8fc3-2ab08008211f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
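The Acquiring/acquired/released triplets for "compute_resources" come from oslo.concurrency, which logs the wait and hold times itself. A minimal sketch of guarding a critical section the same way (the lock name matches the log; the guarded body is illustrative):

    from oslo_concurrency import lockutils

    def refresh_node_cache(tracker):
        # Serializes with every other "compute_resources" holder in this
        # process; the library emits the waited/held DEBUG lines above.
        with lockutils.lock("compute_resources"):
            tracker.clean_compute_node_cache()  # hypothetical callee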
[ 2041.700057] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3eeba2-9e6e-419c-8a55-e079126c3b40 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2041.713362] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb44a482-569d-419f-afc7-52a1a2c2c955 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2041.719506] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbaee6b-cfdc-4f51-9a44-c212f2387d86 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2041.749861] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181322MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2041.750008] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2041.750206] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2041.821101] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2041.821377] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2041.821594] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2041.849962] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2041.850138] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 2e186217-c1e1-40c6-8d84-988f35f6b93d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2041.850267] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2041.850389] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c233ab81-232d-49be-a176-bf846f0d8cc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2041.850509] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bb56c470-9f85-44b1-b1ec-f44236e9de51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2041.850626] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 38fe96cf-e570-4c0e-af6f-2f199efc06ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2041.850744] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0d609df2-621c-456f-b8ce-a209e9052153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2041.850860] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 66420497-c0f6-4f1d-86ee-23d53400e325 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2041.850975] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 59d93243-c15c-4554-863b-779d94b3d858 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2041.851101] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f3a3a510-a085-4388-b49d-b4371095b436 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2041.861685] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance ea0fe7f8-9070-4b17-bc36-d65c62a18923 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2041.871859] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2041.881125] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance be5cf1b4-da97-4944-bb38-f10943576b8f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2041.890428] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 3a7e93d3-cef2-4b3f-a20b-da373780e1c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2041.899733] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 90f1ad20-b501-4f1e-95ff-1d428c51c242 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2041.909182] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02e32086-8fe7-4def-ac71-7c4c43ee0f23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2041.918344] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb5fb791-5f62-4717-8d8f-7d56ffda15be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2041.928037] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 455e7272-f099-496f-b929-ed6fa9a0ab44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 2041.928271] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2041.928420] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2042.117444] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c498152f-9fa1-4c0c-81a0-1d91d405b953 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2042.125049] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f567001-0c71-4d20-b605-733a1587f1f9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2042.154385] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4f86a3-1689-4c8f-bee6-7d0dbd10e12c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2042.161877] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c5f91f-05a6-45f8-9784-1362c0f099b1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2042.175706] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2042.184261] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
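The inventory dict reported to placement above implies the usable capacity per resource class via (total - reserved) * allocation_ratio; with 48 physical vCPUs and a 4.0 ratio that is 192 schedulable VCPU units even though only 48 exist. A worked check of that arithmetic:

    # Placement capacity convention: usable = (total - reserved) * allocation_ratio
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        usable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, usable)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0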
[ 2042.202243] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2042.202427] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.452s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2042.477022] env[61215]: DEBUG nova.compute.manager [req-d700c17a-969d-47da-b595-03714123bdae req-66d0b578-5d31-4fa9-b285-826f825349e7 service nova] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Received event network-changed-39679140-066a-414b-883d-9f820b49e45e {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}}
[ 2042.477022] env[61215]: DEBUG nova.compute.manager [req-d700c17a-969d-47da-b595-03714123bdae req-66d0b578-5d31-4fa9-b285-826f825349e7 service nova] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Refreshing instance network info cache due to event network-changed-39679140-066a-414b-883d-9f820b49e45e. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}}
[ 2042.477273] env[61215]: DEBUG oslo_concurrency.lockutils [req-d700c17a-969d-47da-b595-03714123bdae req-66d0b578-5d31-4fa9-b285-826f825349e7 service nova] Acquiring lock "refresh_cache-f3a3a510-a085-4388-b49d-b4371095b436" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2042.477347] env[61215]: DEBUG oslo_concurrency.lockutils [req-d700c17a-969d-47da-b595-03714123bdae req-66d0b578-5d31-4fa9-b285-826f825349e7 service nova] Acquired lock "refresh_cache-f3a3a510-a085-4388-b49d-b4371095b436" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2042.477513] env[61215]: DEBUG nova.network.neutron [req-d700c17a-969d-47da-b595-03714123bdae req-66d0b578-5d31-4fa9-b285-826f825349e7 service nova] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Refreshing network info cache for port 39679140-066a-414b-883d-9f820b49e45e {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 2042.746525] env[61215]: DEBUG nova.network.neutron [req-d700c17a-969d-47da-b595-03714123bdae req-66d0b578-5d31-4fa9-b285-826f825349e7 service nova] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Updated VIF entry in instance network info cache for port 39679140-066a-414b-883d-9f820b49e45e. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 2042.747053] env[61215]: DEBUG nova.network.neutron [req-d700c17a-969d-47da-b595-03714123bdae req-66d0b578-5d31-4fa9-b285-826f825349e7 service nova] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Updating instance_info_cache with network_info: [{"id": "39679140-066a-414b-883d-9f820b49e45e", "address": "fa:16:3e:96:97:3f", "network": {"id": "33e707ae-ffbe-4208-a3b4-6f45d3a65a85", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2079632580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "919d66c0b792490694750f6760a90114", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39679140-06", "ovs_interfaceid": "39679140-066a-414b-883d-9f820b49e45e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2042.757812] env[61215]: DEBUG oslo_concurrency.lockutils [req-d700c17a-969d-47da-b595-03714123bdae req-66d0b578-5d31-4fa9-b285-826f825349e7 service nova] Releasing lock "refresh_cache-f3a3a510-a085-4388-b49d-b4371095b436" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2044.177761] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2045.649392] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2049.842033] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "9eacbeb5-b918-4b0f-82f4-d06a037803df" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2049.842368] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "9eacbeb5-b918-4b0f-82f4-d06a037803df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2054.150473] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "59d93243-c15c-4554-863b-779d94b3d858" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2077.688198] env[61215]: DEBUG oslo_concurrency.lockutils [None req-7d45be56-94aa-4b72-9c4b-65288d677f30 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "f3a3a510-a085-4388-b49d-b4371095b436" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2084.309523] env[61215]: WARNING oslo_vmware.rw_handles [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 2084.309523] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 2084.309523] env[61215]: ERROR oslo_vmware.rw_handles   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 2084.309523] env[61215]: ERROR oslo_vmware.rw_handles     self._conn.getresponse()
[ 2084.309523] env[61215]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 2084.309523] env[61215]: ERROR oslo_vmware.rw_handles     response.begin()
[ 2084.309523] env[61215]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 2084.309523] env[61215]: ERROR oslo_vmware.rw_handles     version, status, reason = self._read_status()
[ 2084.309523] env[61215]: ERROR oslo_vmware.rw_handles   File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 2084.309523] env[61215]: ERROR oslo_vmware.rw_handles     raise RemoteDisconnected("Remote end closed connection without"
[ 2084.309523] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 2084.309523] env[61215]: ERROR oslo_vmware.rw_handles
[ 2084.310372] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/e8ef57e8-cfa1-4172-ac5e-ddd8b912d4da/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 2084.312605] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 2084.312908] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Copying Virtual Disk [datastore1] vmware_temp/e8ef57e8-cfa1-4172-ac5e-ddd8b912d4da/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/e8ef57e8-cfa1-4172-ac5e-ddd8b912d4da/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
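The WARNING above is benign: the image bytes were already written, and the ESX host simply dropped the connection instead of sending a final response, so rw_handles logs the RemoteDisconnected and moves on. A sketch of that tolerate-on-close pattern (the handle class is hypothetical; only the exception handling mirrors the traceback):

    import http.client
    import logging

    LOG = logging.getLogger(__name__)

    class WriteHandle:
        def __init__(self, conn):
            self._conn = conn  # an http.client.HTTPSConnection with a request in flight

        def close(self):
            try:
                self._conn.getresponse()
            except http.client.RemoteDisconnected as exc:
                # Upload already completed; a server hang-up here is only noise.
                LOG.warning("Error occurred while reading the HTTP response.: %s", exc)
            finally:
                self._conn.close()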
[ 2084.313249] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d5e7e5f-154f-4f4d-a49d-efb2f718730a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.321972] env[61215]: DEBUG oslo_vmware.api [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){
[ 2084.321972] env[61215]: value = "task-1690403"
[ 2084.321972] env[61215]: _type = "Task"
[ 2084.321972] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2084.329912] env[61215]: DEBUG oslo_vmware.api [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': task-1690403, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2084.833282] env[61215]: DEBUG oslo_vmware.exceptions [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2084.833575] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2084.834172] env[61215]: ERROR nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2084.834172] env[61215]: Faults: ['InvalidArgument']
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Traceback (most recent call last):
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]   File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]     yield resources
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]   File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]     self.driver.spawn(context, instance, image_meta,
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]     self._fetch_image_if_missing(context, vi)
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]     image_cache(vi, tmp_image_ds_loc)
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]     vm_util.copy_virtual_disk(
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]     session._wait_for_task(vmdk_copy_task)
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]     return self.wait_for_task(task_ref)
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]     return evt.wait()
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]     result = hub.switch()
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]     return self.greenlet.switch()
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]     self.f(*self.args, **self.kw)
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]     raise exceptions.translate_fault(task_info.error)
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Faults: ['InvalidArgument']
[ 2084.834172] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9]
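In the traceback above, _poll_task turns the task's error into a VimFaultException via translate_fault; the separate "Faults: ['InvalidArgument']" line suggests the exception carries the fault names alongside the message. A sketch of branching on them (the fault_list attribute is read defensively; treat it as an assumption):

    import logging
    from oslo_vmware import exceptions as vexc

    LOG = logging.getLogger(__name__)

    def run_task(session, task_ref):
        try:
            return session.wait_for_task(task_ref)
        except vexc.VimFaultException as exc:
            faults = getattr(exc, "fault_list", [])
            if "InvalidArgument" in faults:
                # A malformed request parameter (here: fileType);
                # retrying the same call cannot succeed.
                raise
            LOG.warning("vCenter task failed with faults %s", faults)
            raise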
[ 2084.835743] env[61215]: INFO nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Terminating instance
[ 2084.836118] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2084.836313] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2084.836945] env[61215]: DEBUG nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 2084.837151] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2084.837389] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08309b42-e3b0-4c50-82ff-24b4c697eacb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.839723] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29df908b-29bf-4613-9b17-6650e7f9443e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.846755] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2084.846977] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5ebbfc4-2dfa-4f8f-8b9e-776d7f06009d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.849153] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2084.849371] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2084.850363] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-017db855-a5d8-4c27-a18e-243b148fd6a3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.855028] env[61215]: DEBUG oslo_vmware.api [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Waiting for the task: (returnval){
[ 2084.855028] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52bd24fa-6a51-c9a4-0dee-64239413dc2c"
[ 2084.855028] env[61215]: _type = "Task"
[ 2084.855028] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2084.866260] env[61215]: DEBUG oslo_vmware.api [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52bd24fa-6a51-c9a4-0dee-64239413dc2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2084.926546] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2084.926801] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2084.926990] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Deleting the datastore file [datastore1] c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2084.927397] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11daa833-6b9a-4611-a73f-221e28ad960e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2084.933970] env[61215]: DEBUG oslo_vmware.api [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){
[ 2084.933970] env[61215]: value = "task-1690405"
[ 2084.933970] env[61215]: _type = "Task"
[ 2084.933970] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2084.942652] env[61215]: DEBUG oslo_vmware.api [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': task-1690405, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2085.365521] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 2085.365852] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Creating directory with path [datastore1] vmware_temp/05428117-3383-4382-8b18-a01560f451c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2085.365903] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c939ab3d-2e11-44cb-a219-8f6da5215514 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2085.377257] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Created directory with path [datastore1] vmware_temp/05428117-3383-4382-8b18-a01560f451c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2085.377461] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Fetch image to [datastore1] vmware_temp/05428117-3383-4382-8b18-a01560f451c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 2085.377634] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/05428117-3383-4382-8b18-a01560f451c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 2085.378394] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e8db6e-6648-43de-a485-4bca37677401 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2085.384739] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0297bd58-42d2-4f06-b52a-f1a38087ce95 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2085.393501] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119a361d-5348-47e6-93cb-93126fc41981 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2085.424831] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30e955d-a83b-411c-baa4-c53b9e6a82aa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2085.430360] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-991157d2-16c2-4f2d-ae3f-c78a57b4dd46 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2085.442557] env[61215]: DEBUG oslo_vmware.api [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': task-1690405, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076645} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2085.442827] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 2085.443046] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 2085.443305] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2085.443547] env[61215]: INFO nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Took 0.61 seconds to destroy the instance on the hypervisor.
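Once the hypervisor-side destroy finishes, the manager aborts the resource claim so the tracker's accounting is rolled back (the "Aborting claim" and abort_instance_claim records that follow). A minimal sketch of that claim-with-abort contract, using hypothetical shapes rather than nova's real Claim/ResourceTracker classes:

    class Claim:
        """Reserve tracker resources; give them back if the build fails."""

        def __init__(self, tracker, instance_uuid):
            self.tracker = tracker
            self.instance_uuid = instance_uuid

        def __enter__(self):
            self.tracker.reserve(self.instance_uuid)
            return self

        def __exit__(self, exc_type, exc, tb):
            if exc_type is not None:
                self.tracker.abort_instance_claim(self.instance_uuid)
            return False  # never swallow the original spawn error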
[ 2085.445796] env[61215]: DEBUG nova.compute.claims [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2085.446015] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.446306] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.450638] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2085.502037] env[61215]: DEBUG oslo_vmware.rw_handles [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/05428117-3383-4382-8b18-a01560f451c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2085.564023] env[61215]: DEBUG oslo_vmware.rw_handles [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2085.564896] env[61215]: DEBUG oslo_vmware.rw_handles [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/05428117-3383-4382-8b18-a01560f451c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2085.745909] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30e1eed-57f9-436c-b589-30239bd95aaf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.753290] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c5e026-6ab3-4b19-aa84-cc0ab9db1942 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.782635] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f0226f-8237-4348-8e72-0978db9b9140 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.790016] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629cb299-8c69-4ec4-a926-c5572747f98f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.803930] env[61215]: DEBUG nova.compute.provider_tree [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2085.812472] env[61215]: DEBUG nova.scheduler.client.report [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2085.829297] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.383s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.829836] env[61215]: ERROR nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2085.829836] env[61215]: Faults: ['InvalidArgument'] [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Traceback (most recent call last): [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2085.829836] env[61215]: ERROR 
nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] self.driver.spawn(context, instance, image_meta, [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] self._fetch_image_if_missing(context, vi) [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] image_cache(vi, tmp_image_ds_loc) [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] vm_util.copy_virtual_disk( [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] session._wait_for_task(vmdk_copy_task) [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] return self.wait_for_task(task_ref) [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] return evt.wait() [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] result = hub.switch() [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] return self.greenlet.switch() [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] self.f(*self.args, **self.kw) [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] raise exceptions.translate_fault(task_info.error) [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Faults: ['InvalidArgument'] [ 2085.829836] env[61215]: ERROR nova.compute.manager [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] [ 2085.830651] env[61215]: DEBUG nova.compute.utils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2085.833711] env[61215]: DEBUG nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Build of instance c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 was re-scheduled: A specified parameter was not correct: fileType [ 2085.833711] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2085.834159] env[61215]: DEBUG nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2085.834355] env[61215]: DEBUG nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2085.834533] env[61215]: DEBUG nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2085.834699] env[61215]: DEBUG nova.network.neutron [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2086.226520] env[61215]: DEBUG nova.network.neutron [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2086.238643] env[61215]: INFO nova.compute.manager [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Took 0.40 seconds to deallocate network for instance. [ 2086.336879] env[61215]: INFO nova.scheduler.client.report [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Deleted allocations for instance c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 [ 2086.358926] env[61215]: DEBUG oslo_concurrency.lockutils [None req-0dc3b9fe-21b7-4db7-b1e9-61549284002d tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 592.398s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.360429] env[61215]: DEBUG oslo_concurrency.lockutils [None req-93043f15-32f4-44d4-9654-69a75a32cad9 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 394.635s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.360698] env[61215]: DEBUG oslo_concurrency.lockutils [None req-93043f15-32f4-44d4-9654-69a75a32cad9 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2086.360971] env[61215]: DEBUG oslo_concurrency.lockutils [None req-93043f15-32f4-44d4-9654-69a75a32cad9 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.361172] env[61215]: DEBUG oslo_concurrency.lockutils [None req-93043f15-32f4-44d4-9654-69a75a32cad9 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.363190] env[61215]: INFO nova.compute.manager [None req-93043f15-32f4-44d4-9654-69a75a32cad9 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Terminating instance [ 2086.364961] env[61215]: DEBUG nova.compute.manager [None req-93043f15-32f4-44d4-9654-69a75a32cad9 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2086.365191] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-93043f15-32f4-44d4-9654-69a75a32cad9 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2086.365674] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-054bde3d-95fd-41d6-89aa-3dcbe6958bd0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.370550] env[61215]: DEBUG nova.compute.manager [None req-45e84f55-b0c5-41f6-b2b3-1bbf70593657 tempest-ServerActionsTestOtherA-1801254831 tempest-ServerActionsTestOtherA-1801254831-project-member] [instance: ea0fe7f8-9070-4b17-bc36-d65c62a18923] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2086.377101] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccfbd219-4486-4086-8eb2-c72825aee95c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.394512] env[61215]: DEBUG nova.compute.manager [None req-45e84f55-b0c5-41f6-b2b3-1bbf70593657 tempest-ServerActionsTestOtherA-1801254831 tempest-ServerActionsTestOtherA-1801254831-project-member] [instance: ea0fe7f8-9070-4b17-bc36-d65c62a18923] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 2086.408234] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-93043f15-32f4-44d4-9654-69a75a32cad9 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9 could not be found. 
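The terminate path above looks the VM up via SearchIndex.FindAllByUuid and, when the backend no longer has it, downgrades the failure to a WARNING and proceeds as though the destroy succeeded. A minimal sketch of that idempotent-destroy pattern, assuming hypothetical backend.find_vm/backend.delete_vm hooks:

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_on_hypervisor(backend, instance_uuid, log=print):
    """Idempotent destroy: a VM already gone from the backend is fine."""
    try:
        vm_ref = backend.find_vm(instance_uuid)   # hypothetical lookup
        backend.delete_vm(vm_ref)                 # hypothetical teardown
    except InstanceNotFound:
        # Matches the WARNING above: absence is treated as success.
        log("Instance does not exist on backend: %s" % instance_uuid)
    log("[instance: %s] Instance destroyed" % instance_uuid)


# Toy usage against a backend that has already lost the VM.
class _GoneBackend:
    def find_vm(self, uuid):
        raise InstanceNotFound(uuid)

    def delete_vm(self, ref):
        pass


destroy_on_hypervisor(_GoneBackend(),
                      "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9")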
[ 2086.408491] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-93043f15-32f4-44d4-9654-69a75a32cad9 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2086.408915] env[61215]: INFO nova.compute.manager [None req-93043f15-32f4-44d4-9654-69a75a32cad9 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2086.409107] env[61215]: DEBUG oslo.service.loopingcall [None req-93043f15-32f4-44d4-9654-69a75a32cad9 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2086.409376] env[61215]: DEBUG nova.compute.manager [-] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2086.409478] env[61215]: DEBUG nova.network.neutron [-] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2086.422520] env[61215]: DEBUG oslo_concurrency.lockutils [None req-45e84f55-b0c5-41f6-b2b3-1bbf70593657 tempest-ServerActionsTestOtherA-1801254831 tempest-ServerActionsTestOtherA-1801254831-project-member] Lock "ea0fe7f8-9070-4b17-bc36-d65c62a18923" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.411s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.431697] env[61215]: DEBUG nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2086.442344] env[61215]: DEBUG nova.network.neutron [-] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2086.453536] env[61215]: INFO nova.compute.manager [-] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] Took 0.04 seconds to deallocate network for instance. 
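The oslo.service loopingcall record above waits on _deallocate_network_with_retries, i.e. network teardown is wrapped in a bounded retry loop rather than called once. A simplified, illustrative analogue follows; the helper name, retry count, and delay policy are assumptions, not Nova's actual values.

import time


def call_with_retries(func, max_retries=3, delay=1.0,
                      exceptions=(Exception,), log=print):
    """Invoke `func`, retrying a bounded number of times on failure."""
    for attempt in range(1, max_retries + 1):
        try:
            return func()
        except exceptions as exc:
            if attempt == max_retries:
                raise
            log("Attempt %d/%d failed (%s); retrying in %.1fs"
                % (attempt, max_retries, exc, delay))
            time.sleep(delay)


# Toy usage: the deallocation succeeds on the second attempt.
_state = {"calls": 0}


def _flaky_deallocate():
    _state["calls"] += 1
    if _state["calls"] < 2:
        raise RuntimeError("transient Neutron error")
    return "deallocated"


call_with_retries(_flaky_deallocate, delay=0.0)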
[ 2086.486629] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2086.486867] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.488306] env[61215]: INFO nova.compute.claims [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2086.537816] env[61215]: DEBUG oslo_concurrency.lockutils [None req-93043f15-32f4-44d4-9654-69a75a32cad9 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.178s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.538666] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 253.571s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.538858] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9] During sync_power_state the instance has a pending task (deleting). Skip. 
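The lockutils breadcrumbs that recur throughout this log ("Acquiring lock", "acquired ... waited", '"released" ... held') come from a wrapper that times both the wait for and the hold of a named lock; the 592-second hold above is an entire build-and-run cycle serialized under one instance lock. A minimal in-process sketch of that pattern using threading.Lock (oslo.concurrency additionally offers inter-process file locks, omitted here):

import time
from contextlib import contextmanager
from threading import Lock

_LOCKS = {}


@contextmanager
def timed_lock(name, log=print):
    """Named lock that logs wait and hold durations, lockutils-style."""
    lock = _LOCKS.setdefault(name, Lock())
    log('Acquiring lock "%s"' % name)
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    log('Lock "%s" acquired :: waited %.3fs' % (name, acquired - start))
    try:
        yield
    finally:
        lock.release()
        log('Lock "%s" "released" :: held %.3fs'
            % (name, time.monotonic() - acquired))


# Toy usage: the critical section would be, e.g., a resource claim.
with timed_lock("compute_resources"):
    pass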
[ 2086.539053] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "c3e4a99c-5ddd-49aa-83ee-0edb5c0952b9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.719288] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58dbf4a9-d439-4612-854d-9db91298ab63 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.727757] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b7df83-3867-4e00-8f74-44a2726ac823 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.757496] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8597fe-ff0a-4feb-94b4-342f8ff374ab {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.764932] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de159753-72fe-4478-bc38-825eec6f72e7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.777839] env[61215]: DEBUG nova.compute.provider_tree [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2086.786043] env[61215]: DEBUG nova.scheduler.client.report [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2086.799467] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.313s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.799926] env[61215]: DEBUG nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2086.834393] env[61215]: DEBUG nova.compute.utils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2086.836039] env[61215]: DEBUG nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2086.836039] env[61215]: DEBUG nova.network.neutron [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2086.844117] env[61215]: DEBUG nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2086.901910] env[61215]: DEBUG nova.policy [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab070511b64a4d14ad02c93184a2531a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de7bfb159ad5462ab19c5ed5c1432b16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2086.906115] env[61215]: DEBUG nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2086.932996] env[61215]: DEBUG nova.virt.hardware [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2086.932996] env[61215]: DEBUG nova.virt.hardware [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2086.932996] env[61215]: DEBUG nova.virt.hardware [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2086.933469] env[61215]: DEBUG nova.virt.hardware [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2086.933469] env[61215]: DEBUG nova.virt.hardware [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2086.933584] env[61215]: DEBUG nova.virt.hardware [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2086.933691] env[61215]: DEBUG nova.virt.hardware [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2086.933848] env[61215]: DEBUG nova.virt.hardware [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2086.934142] env[61215]: DEBUG nova.virt.hardware [None 
req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2086.935344] env[61215]: DEBUG nova.virt.hardware [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2086.935344] env[61215]: DEBUG nova.virt.hardware [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2086.935485] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c78b0c1-1692-49bd-ba3d-fca5c27838ab {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.945645] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1fe401b-8fd1-4090-ba61-2ed3e1a266a1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.326605] env[61215]: DEBUG nova.network.neutron [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Successfully created port: 18bca4ae-2e5d-408b-af09-4f91b8809718 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2087.972469] env[61215]: DEBUG nova.network.neutron [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Successfully updated port: 18bca4ae-2e5d-408b-af09-4f91b8809718 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2087.984992] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "refresh_cache-17d70ed3-4a82-48c8-95ad-c81fb0772e42" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2087.985150] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquired lock "refresh_cache-17d70ed3-4a82-48c8-95ad-c81fb0772e42" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2087.985303] env[61215]: DEBUG nova.network.neutron [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2088.030523] env[61215]: DEBUG nova.network.neutron [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 
tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2088.294769] env[61215]: DEBUG nova.compute.manager [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Received event network-vif-plugged-18bca4ae-2e5d-408b-af09-4f91b8809718 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2088.294769] env[61215]: DEBUG oslo_concurrency.lockutils [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] Acquiring lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2088.294769] env[61215]: DEBUG oslo_concurrency.lockutils [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] Lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2088.294769] env[61215]: DEBUG oslo_concurrency.lockutils [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] Lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.294949] env[61215]: DEBUG nova.compute.manager [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] No waiting events found dispatching network-vif-plugged-18bca4ae-2e5d-408b-af09-4f91b8809718 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2088.295226] env[61215]: WARNING nova.compute.manager [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Received unexpected event network-vif-plugged-18bca4ae-2e5d-408b-af09-4f91b8809718 for instance with vm_state building and task_state spawning. [ 2088.295290] env[61215]: DEBUG nova.compute.manager [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Received event network-changed-18bca4ae-2e5d-408b-af09-4f91b8809718 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2088.295437] env[61215]: DEBUG nova.compute.manager [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Refreshing instance network info cache due to event network-changed-18bca4ae-2e5d-408b-af09-4f91b8809718. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2088.295611] env[61215]: DEBUG oslo_concurrency.lockutils [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] Acquiring lock "refresh_cache-17d70ed3-4a82-48c8-95ad-c81fb0772e42" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2088.303555] env[61215]: DEBUG nova.network.neutron [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Updating instance_info_cache with network_info: [{"id": "18bca4ae-2e5d-408b-af09-4f91b8809718", "address": "fa:16:3e:ca:77:64", "network": {"id": "aca5b4c9-c42c-4822-82bd-5db9917c1d95", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1452760311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de7bfb159ad5462ab19c5ed5c1432b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18bca4ae-2e", "ovs_interfaceid": "18bca4ae-2e5d-408b-af09-4f91b8809718", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.315940] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Releasing lock "refresh_cache-17d70ed3-4a82-48c8-95ad-c81fb0772e42" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2088.316236] env[61215]: DEBUG nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Instance network_info: |[{"id": "18bca4ae-2e5d-408b-af09-4f91b8809718", "address": "fa:16:3e:ca:77:64", "network": {"id": "aca5b4c9-c42c-4822-82bd-5db9917c1d95", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1452760311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de7bfb159ad5462ab19c5ed5c1432b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap18bca4ae-2e", "ovs_interfaceid": "18bca4ae-2e5d-408b-af09-4f91b8809718", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2088.316525] env[61215]: DEBUG oslo_concurrency.lockutils [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] Acquired lock "refresh_cache-17d70ed3-4a82-48c8-95ad-c81fb0772e42" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2088.316705] env[61215]: DEBUG nova.network.neutron [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Refreshing network info cache for port 18bca4ae-2e5d-408b-af09-4f91b8809718 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2088.317745] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:77:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37fb1918-d178-4e12-93e6-316381e78be4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18bca4ae-2e5d-408b-af09-4f91b8809718', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2088.325392] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Creating folder: Project (de7bfb159ad5462ab19c5ed5c1432b16). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2088.326271] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c79f18ce-6a6a-40d0-abc3-c0bf4caff6d4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.338932] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Created folder: Project (de7bfb159ad5462ab19c5ed5c1432b16) in parent group-v352463. [ 2088.339128] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Creating folder: Instances. Parent ref: group-v352549. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2088.339354] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abef3ccb-595c-4d65-bd80-136229171c5e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.348943] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Created folder: Instances in parent group-v352549. 
[ 2088.349312] env[61215]: DEBUG oslo.service.loopingcall [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2088.349525] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2088.349726] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f42d9639-dede-4cbc-aed7-fd01ca9dfb71 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.374965] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2088.374965] env[61215]: value = "task-1690408" [ 2088.374965] env[61215]: _type = "Task" [ 2088.374965] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.383702] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690408, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.588842] env[61215]: DEBUG nova.network.neutron [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Updated VIF entry in instance network info cache for port 18bca4ae-2e5d-408b-af09-4f91b8809718. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2088.589237] env[61215]: DEBUG nova.network.neutron [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Updating instance_info_cache with network_info: [{"id": "18bca4ae-2e5d-408b-af09-4f91b8809718", "address": "fa:16:3e:ca:77:64", "network": {"id": "aca5b4c9-c42c-4822-82bd-5db9917c1d95", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1452760311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de7bfb159ad5462ab19c5ed5c1432b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18bca4ae-2e", "ovs_interfaceid": "18bca4ae-2e5d-408b-af09-4f91b8809718", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.599542] env[61215]: DEBUG oslo_concurrency.lockutils [req-569ebd5e-1af5-442e-8ce7-be6fadf5a74d req-fe8f0274-aadd-4534-9e0d-6cab1dadf4bd service nova] Releasing lock "refresh_cache-17d70ed3-4a82-48c8-95ad-c81fb0772e42" {{(pid=61215) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2088.884455] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690408, 'name': CreateVM_Task, 'duration_secs': 0.295565} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2088.884630] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2088.885313] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2088.885483] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2088.885803] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2088.886058] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-428614a3-7cf0-45cb-8d75-5eac36ee7a7b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.890241] env[61215]: DEBUG oslo_vmware.api [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Waiting for the task: (returnval){ [ 2088.890241] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]527a32ae-4945-bed8-15c5-498b56f5d007" [ 2088.890241] env[61215]: _type = "Task" [ 2088.890241] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.897238] env[61215]: DEBUG oslo_vmware.api [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]527a32ae-4945-bed8-15c5-498b56f5d007, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.401897] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2089.401897] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2089.401897] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2089.538350] env[61215]: DEBUG oslo_concurrency.lockutils [None req-59ea7b36-68f6-42da-8d3e-a11abe9b7350 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2090.716856] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "49ab8e42-2da3-474b-b283-9d31b089fd76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2090.717162] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "49ab8e42-2da3-474b-b283-9d31b089fd76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.671750] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2099.654477] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2100.654537] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2100.654879] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2101.654825] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2102.653792] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2102.653972] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2102.654118] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2102.677212] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2102.677534] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2102.677534] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2102.677615] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2102.677731] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2102.677853] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2102.677974] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2102.678104] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2102.678229] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2102.678351] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2102.678474] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2102.678951] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2102.679212] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
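
Two recurring periodic-task idioms show up above: `_heal_instance_info_cache` skips every instance still in the Building state, and `_reclaim_queued_deletes` bails out immediately because `CONF.reclaim_instance_interval <= 0`. Each task runs on its schedule regardless, then re-checks its own gate before doing work, so flipping the option takes effect on the next tick. A rough sketch of that gating; the `conf` dict is a stand-in for Nova's real config object, not its actual API.

```python
# Each periodic task re-checks its config gate on every tick.
conf = {"reclaim_instance_interval": 0}

def reclaim_queued_deletes(context=None):
    interval = conf["reclaim_instance_interval"]
    if interval <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")
        return
    # ... otherwise: find SOFT_DELETED instances older than `interval`
    # seconds and reap them ...

reclaim_queued_deletes()
```
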
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2102.679426] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2102.690610] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.690982] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.690982] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2102.691138] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2102.692625] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a579dd56-f391-4276-add3-707db18faa55 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.701136] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17324ff-2634-4ccb-b3ff-4c286119ef0f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.714721] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2adb15d4-92ad-481c-a154-677b126c6212 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.720841] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16cf530-c3df-4d29-9bae-30913d3e5fb6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.748924] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181321MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2102.749077] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2102.749267] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.899704] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 2e186217-c1e1-40c6-8d84-988f35f6b93d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2102.899877] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2102.900017] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c233ab81-232d-49be-a176-bf846f0d8cc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2102.900147] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bb56c470-9f85-44b1-b1ec-f44236e9de51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2102.900266] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 38fe96cf-e570-4c0e-af6f-2f199efc06ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2102.900385] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0d609df2-621c-456f-b8ce-a209e9052153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2102.900501] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 66420497-c0f6-4f1d-86ee-23d53400e325 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2102.900616] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 59d93243-c15c-4554-863b-779d94b3d858 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
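
The paired `Lock "compute_resources" acquired ... waited 0.000s` and `"released" ... held 0.558s` lines are emitted by oslo.concurrency's lock wrapper, which times both how long the caller waited to acquire the lock and how long it held it. A simplified re-creation of that instrumentation; the `timed_lock` helper is an assumption for illustration, not `lockutils`' actual implementation.

```python
import threading
import time
from contextlib import contextmanager

_locks = {}  # named locks, like lockutils' internal registry

@contextmanager
def timed_lock(name, caller):
    """Emit waited/held timings in the spirit of oslo_concurrency.lockutils."""
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    print(f'Acquiring lock "{name}" by "{caller}"')
    with lock:
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

with timed_lock("compute_resources",
                "ResourceTracker._update_available_resource"):
    pass  # audit hypervisor resources while holding the lock
```
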
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2102.900732] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f3a3a510-a085-4388-b49d-b4371095b436 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2102.900843] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2102.911723] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 90f1ad20-b501-4f1e-95ff-1d428c51c242 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2102.928884] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02e32086-8fe7-4def-ac71-7c4c43ee0f23 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2102.935329] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb5fb791-5f62-4717-8d8f-7d56ffda15be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2102.945731] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 455e7272-f099-496f-b929-ed6fa9a0ab44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2102.957555] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 9eacbeb5-b918-4b0f-82f4-d06a037803df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
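
Note the two branches the resource tracker walks above: instances actively managed on this host keep their placement allocations, while instances that are merely scheduled here (allocation already made, VM not yet built) are skipped rather than healed. A condensed, hedged sketch of that decision; the container types and names are illustrative, not Nova's actual data model.

```python
def reconcile_allocations(allocations, instances_on_host, scheduled_uuids):
    """Mimic the two branches logged by _remove_deleted_instances_allocations."""
    for uuid, resources in allocations.items():
        if uuid in instances_on_host:
            print(f"Instance {uuid} actively managed on this compute host "
                  f"and has allocations in placement: {resources}.")
        elif uuid in scheduled_uuids:
            print(f"Instance {uuid} has been scheduled to this compute host "
                  "... Skipping heal of allocation.")
        else:
            pass  # a deleted instance's leftover allocation would be removed

reconcile_allocations(
    {"17d70ed3-4a82-48c8-95ad-c81fb0772e42":
         {"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}},
    instances_on_host={"17d70ed3-4a82-48c8-95ad-c81fb0772e42"},
    scheduled_uuids=set())
```
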
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2102.967890] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49ab8e42-2da3-474b-b283-9d31b089fd76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2102.968131] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2102.968288] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2102.984030] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing inventories for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2102.998028] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating ProviderTree inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2102.998209] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2103.008226] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing aggregate associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, aggregates: None {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2103.025663] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing trait associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, traits: 
COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2103.229024] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f202b3b-ef1f-4201-afe3-819ffec7b69b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.234267] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c517ec-9928-4385-a3b7-71138c28f325 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.264137] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ae32c7-06ff-4603-9f3c-c364834ac9ee {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.271613] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53aee85c-ede0-419e-ad24-52f1124dbe99 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.284543] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2103.292729] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2103.306638] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2103.306820] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.558s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.282930] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2105.654528] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2105.654822] env[61215]: DEBUG 
nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61215) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11258}} [ 2107.666645] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2107.666892] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11220}} [ 2107.675534] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] There are 0 instances to clean {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2109.654692] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2131.622862] env[61215]: DEBUG oslo_concurrency.lockutils [None req-cc8fe1cb-fe62-45cb-9272-34615e688f64 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "e4fdc9ea-dad8-4422-b110-5c5aa4e25f3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.623204] env[61215]: DEBUG oslo_concurrency.lockutils [None req-cc8fe1cb-fe62-45cb-9272-34615e688f64 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "e4fdc9ea-dad8-4422-b110-5c5aa4e25f3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.634172] env[61215]: WARNING oslo_vmware.rw_handles [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2133.634172] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2133.634172] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2133.634172] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2133.634172] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2133.634172] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2133.634172] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2133.634172] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2133.634172] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2133.634172] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2133.634172] env[61215]: 
ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2133.634172] env[61215]: ERROR oslo_vmware.rw_handles [ 2133.634851] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/05428117-3383-4382-8b18-a01560f451c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2133.636768] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2133.637047] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Copying Virtual Disk [datastore1] vmware_temp/05428117-3383-4382-8b18-a01560f451c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/05428117-3383-4382-8b18-a01560f451c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2133.637353] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d765eb8-5d85-4f0d-a9bc-d7497f1539c8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.645543] env[61215]: DEBUG oslo_vmware.api [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Waiting for the task: (returnval){ [ 2133.645543] env[61215]: value = "task-1690409" [ 2133.645543] env[61215]: _type = "Task" [ 2133.645543] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.653213] env[61215]: DEBUG oslo_vmware.api [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Task: {'id': task-1690409, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.155843] env[61215]: DEBUG oslo_vmware.exceptions [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Fault InvalidArgument not matched. 
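
The `Fault InvalidArgument not matched` line is oslo.vmware choosing which exception class to raise for the failed task: it looks the fault name up in a registry of specific exception types and, finding no match, falls back to the generic `VimFaultException` seen in the traceback. A toy version of that lookup; the registry contents and class names here are assumptions, not oslo.vmware's real exception hierarchy.

```python
class VimFaultException(Exception):
    """Generic fallback carrying the fault names, as in the log."""
    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list

class FileNotFoundFault(VimFaultException): ...

# Registry of fault names that have dedicated exception classes.
_FAULT_CLASSES = {"FileNotFound": FileNotFoundFault}

def translate_fault(fault_name, message):
    cls = _FAULT_CLASSES.get(fault_name)
    if cls is None:
        # Matches the "Fault InvalidArgument not matched." debug line above.
        print(f"Fault {fault_name} not matched.")
        return VimFaultException([fault_name], message)
    return cls([fault_name], message)

exc = translate_fault("InvalidArgument",
                      "A specified parameter was not correct: fileType")
```
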
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2134.156156] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2134.156719] env[61215]: ERROR nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2134.156719] env[61215]: Faults: ['InvalidArgument'] [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Traceback (most recent call last): [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] yield resources [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self.driver.spawn(context, instance, image_meta, [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self._fetch_image_if_missing(context, vi) [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] image_cache(vi, tmp_image_ds_loc) [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] vm_util.copy_virtual_disk( [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] session._wait_for_task(vmdk_copy_task) [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] return self.wait_for_task(task_ref) [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] return evt.wait() [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] result = hub.switch() [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] return self.greenlet.switch() [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self.f(*self.args, **self.kw) [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] raise exceptions.translate_fault(task_info.error) [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Faults: ['InvalidArgument'] [ 2134.156719] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] [ 2134.157700] env[61215]: INFO nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Terminating instance [ 2134.158691] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2134.158906] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2134.159427] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 
tempest-ServersAaction247Test-408738690-project-member] Acquiring lock "refresh_cache-2e186217-c1e1-40c6-8d84-988f35f6b93d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2134.159584] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquired lock "refresh_cache-2e186217-c1e1-40c6-8d84-988f35f6b93d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2134.159755] env[61215]: DEBUG nova.network.neutron [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2134.160726] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e4f640cd-abb5-4428-be2d-1120889366d8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.171021] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2134.171209] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2134.172181] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0132d8fc-d4b4-4c33-b18c-372eb1cdb194 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.177327] env[61215]: DEBUG oslo_vmware.api [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Waiting for the task: (returnval){ [ 2134.177327] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]522dd66f-c028-95c4-3223-4413107d6d84" [ 2134.177327] env[61215]: _type = "Task" [ 2134.177327] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.185025] env[61215]: DEBUG oslo_vmware.api [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]522dd66f-c028-95c4-3223-4413107d6d84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.188732] env[61215]: DEBUG nova.network.neutron [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Instance cache missing network info. 
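
The `refresh_cache-<uuid>` lock above serializes rebuilds of one instance's network info cache; because this server was booted without networking, Neutron reports no ports and the cache is updated with an empty list. Schematically, under stated assumptions: `list_ports_for_instance` is a stand-in for the Neutron query, and the per-instance lock registry is a simplification of lockutils.

```python
import threading

_refresh_locks: dict[str, threading.Lock] = {}

def refresh_nw_info_cache(instance_uuid, cache, list_ports_for_instance):
    """Rebuild one instance's network info cache under its refresh lock."""
    lock = _refresh_locks.setdefault(f"refresh_cache-{instance_uuid}",
                                     threading.Lock())
    with lock:
        if instance_uuid not in cache:
            print(f"[instance: {instance_uuid}] "
                  "Instance cache missing network info.")
        ports = list_ports_for_instance(instance_uuid)  # [] when no NICs
        cache[instance_uuid] = ports
        print(f"[instance: {instance_uuid}] Updating instance_info_cache "
              f"with network_info: {ports}")

refresh_nw_info_cache("2e186217-c1e1-40c6-8d84-988f35f6b93d", {}, lambda u: [])
```
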
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2134.248999] env[61215]: DEBUG nova.network.neutron [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2134.257575] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Releasing lock "refresh_cache-2e186217-c1e1-40c6-8d84-988f35f6b93d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2134.257968] env[61215]: DEBUG nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2134.258180] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2134.259233] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c54f1d-e896-4d1b-9ff8-14839fa8c32f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.267333] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2134.267555] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8adf9547-27b5-4fcb-9b02-3e984ee9079a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.301347] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2134.301549] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2134.301762] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Deleting the datastore file [datastore1] 2e186217-c1e1-40c6-8d84-988f35f6b93d {{(pid=61215) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2134.301999] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9496b8b3-3974-42be-b8f2-eb9e5666a98a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.308052] env[61215]: DEBUG oslo_vmware.api [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Waiting for the task: (returnval){ [ 2134.308052] env[61215]: value = "task-1690411" [ 2134.308052] env[61215]: _type = "Task" [ 2134.308052] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.315299] env[61215]: DEBUG oslo_vmware.api [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Task: {'id': task-1690411, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.688879] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2134.689262] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Creating directory with path [datastore1] vmware_temp/ee025c82-e6c3-4f5a-b605-c75d630378d4/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2134.689423] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e091fe3e-f95c-4476-8975-5b7d47f3747b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.702274] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Created directory with path [datastore1] vmware_temp/ee025c82-e6c3-4f5a-b605-c75d630378d4/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2134.702430] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Fetch image to [datastore1] vmware_temp/ee025c82-e6c3-4f5a-b605-c75d630378d4/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2134.702603] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] 
vmware_temp/ee025c82-e6c3-4f5a-b605-c75d630378d4/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2134.703356] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba6fa73-73a1-4213-997c-cb9889499f23 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.710432] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef383b5-dd25-4898-921a-90fd8651f3f0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.719880] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee964d42-dc25-449b-b8ce-fb27ed058c1f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.750740] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db530fad-3074-4042-871e-8b6e5162d8c4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.757107] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-982e242f-8068-4a38-80bd-192dfc70dc36 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.778260] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2134.821029] env[61215]: DEBUG oslo_vmware.api [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Task: {'id': task-1690411, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043995} completed successfully. 
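
The sequence above (Preparing fetch location, creating a `vmware_temp/...` directory, downloading to `tmp-sparse.vmdk`, then later copying into `devstack-image-cache_base`) is the driver's fetch-if-missing image cache. A local-filesystem sketch of the same idea; paths, the `download` callable, and the final move are illustrative stand-ins, since real Nova performs these steps against the datastore (with a CopyVirtualDisk task), not the local disk.

```python
from pathlib import Path
import shutil
import uuid

def fetch_image_if_missing(image_id: str, cache_root: Path, download) -> Path:
    """Download an image into a temp dir, then publish it into the shared
    cache so concurrent builds can reuse it (filesystem stand-in for the
    [datastore1] devstack-image-cache_base flow in the log)."""
    cached = cache_root / image_id / f"{image_id}.vmdk"
    if cached.exists():
        return cached                      # cache hit: nothing to fetch
    tmp_dir = cache_root.parent / "vmware_temp" / str(uuid.uuid4()) / image_id
    tmp_dir.mkdir(parents=True, exist_ok=True)
    tmp = tmp_dir / "tmp-sparse.vmdk"
    download(tmp)                          # stream image bytes to the temp file
    cached.parent.mkdir(parents=True, exist_ok=True)
    shutil.move(tmp, cached)               # publish into the cache
    return cached

path = fetch_image_if_missing(
    "e91f0c25-9ff9-4937-8440-f47cfb2028bc",
    Path("/tmp/devstack-image-cache_base"),
    lambda p: p.write_bytes(b"\x00" * 1024))
```
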
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.821029] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2134.821029] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2134.821029] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2134.821029] env[61215]: INFO nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Took 0.56 seconds to destroy the instance on the hypervisor. [ 2134.821029] env[61215]: DEBUG oslo.service.loopingcall [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2134.821029] env[61215]: DEBUG nova.compute.manager [-] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Skipping network deallocation for instance since networking was not requested. 
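
`Waiting for function ..._deallocate_network_with_retries to return` is oslo.service's looping-call machinery retrying network deallocation; here it short-circuits because the server was booted with no networks. A bare-bones retry wrapper in the same spirit; the attempt count and backoff schedule are assumptions, not Nova's configured values.

```python
import time

def call_with_retries(fn, attempts=3, delay=1.0, backoff=2.0):
    """Retry fn until it succeeds or attempts run out, roughly what the
    _deallocate_network_with_retries looping call does."""
    for attempt in range(1, attempts + 1):
        try:
            return fn()
        except Exception as exc:
            if attempt == attempts:
                raise
            print(f"attempt {attempt} failed ({exc}); retrying in {delay:.0f}s")
            time.sleep(delay)
            delay *= backoff

def deallocate_network(networks_requested: bool):
    if not networks_requested:
        print("Skipping network deallocation for instance since "
              "networking was not requested.")
        return
    # ... otherwise: ask Neutron to unbind and delete the instance's ports ...

call_with_retries(lambda: deallocate_network(False))
```
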
{{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2134.822520] env[61215]: DEBUG nova.compute.claims [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2134.822821] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2134.823184] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.840233] env[61215]: DEBUG oslo_vmware.rw_handles [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ee025c82-e6c3-4f5a-b605-c75d630378d4/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2134.904728] env[61215]: DEBUG oslo_vmware.rw_handles [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2134.904921] env[61215]: DEBUG oslo_vmware.rw_handles [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ee025c82-e6c3-4f5a-b605-c75d630378d4/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
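
The write handle above streams the 21,318,656-byte image to the ESX datastore URL over HTTPS and then closes the connection; the earlier `RemoteDisconnected` warning (timestamp 2133.634172) came from exactly this `close()` path, when the host dropped the connection before sending a response. A stripped-down version of such a handle using only the standard library; it is a sketch, as oslo.vmware's real class also handles service tickets, certificates, and chunked transfers.

```python
import http.client
from urllib.parse import urlparse

class DatastoreWriteHandle:
    """Stream a file of known size to a datastore URL via HTTP PUT."""

    def __init__(self, url: str, size: int):
        parsed = urlparse(url)
        self._conn = http.client.HTTPSConnection(parsed.netloc)
        self._conn.putrequest("PUT", f"{parsed.path}?{parsed.query}")
        self._conn.putheader("Content-Length", str(size))
        self._conn.endheaders()

    def write(self, chunk: bytes) -> None:
        self._conn.send(chunk)

    def close(self) -> None:
        try:
            # The server may slam the connection shut after the upload; that
            # is the http.client.RemoteDisconnected seen earlier in the log.
            self._conn.getresponse()
        finally:
            self._conn.close()
```
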
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2135.073656] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d05041-4f10-43af-8566-3ed6b0bf8a16 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.081136] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f0cf2f-85a2-4c3d-aa72-ef1cd0d6ebfd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.111081] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12d969a-51e3-45c1-a6c8-43aaccd0bc5b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.118058] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-296a5c76-24e0-43aa-b6c5-d1a56f6e7041 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.130825] env[61215]: DEBUG nova.compute.provider_tree [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2135.138668] env[61215]: DEBUG nova.scheduler.client.report [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2135.151686] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.328s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.152234] env[61215]: ERROR nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2135.152234] env[61215]: Faults: ['InvalidArgument'] [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Traceback (most recent call last): [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2135.152234] env[61215]: ERROR 
nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self.driver.spawn(context, instance, image_meta, [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self._fetch_image_if_missing(context, vi) [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] image_cache(vi, tmp_image_ds_loc) [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] vm_util.copy_virtual_disk( [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] session._wait_for_task(vmdk_copy_task) [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] return self.wait_for_task(task_ref) [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] return evt.wait() [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] result = hub.switch() [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] return self.greenlet.switch() [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self.f(*self.args, **self.kw) [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] raise exceptions.translate_fault(task_info.error) [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Faults: ['InvalidArgument'] [ 2135.152234] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] [ 2135.153034] env[61215]: DEBUG nova.compute.utils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2135.154385] env[61215]: DEBUG nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Build of instance 2e186217-c1e1-40c6-8d84-988f35f6b93d was re-scheduled: A specified parameter was not correct: fileType [ 2135.154385] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2135.154787] env[61215]: DEBUG nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2135.155030] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquiring lock "refresh_cache-2e186217-c1e1-40c6-8d84-988f35f6b93d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2135.155189] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquired lock "refresh_cache-2e186217-c1e1-40c6-8d84-988f35f6b93d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2135.155354] env[61215]: DEBUG nova.network.neutron [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2135.179966] env[61215]: DEBUG nova.network.neutron [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2135.237835] env[61215]: DEBUG nova.network.neutron [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2135.246203] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Releasing lock "refresh_cache-2e186217-c1e1-40c6-8d84-988f35f6b93d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2135.246408] env[61215]: DEBUG nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2135.246588] env[61215]: DEBUG nova.compute.manager [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Skipping network deallocation for instance since networking was not requested. {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2135.332687] env[61215]: INFO nova.scheduler.client.report [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Deleted allocations for instance 2e186217-c1e1-40c6-8d84-988f35f6b93d [ 2135.353970] env[61215]: DEBUG oslo_concurrency.lockutils [None req-60da82c4-8bf4-4496-8da9-369896fd8cd0 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Lock "2e186217-c1e1-40c6-8d84-988f35f6b93d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 630.875s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.355145] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Lock "2e186217-c1e1-40c6-8d84-988f35f6b93d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 432.730s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.355343] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquiring lock "2e186217-c1e1-40c6-8d84-988f35f6b93d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.355549] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Lock 
"2e186217-c1e1-40c6-8d84-988f35f6b93d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.355757] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Lock "2e186217-c1e1-40c6-8d84-988f35f6b93d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.357897] env[61215]: INFO nova.compute.manager [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Terminating instance [ 2135.359758] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquiring lock "refresh_cache-2e186217-c1e1-40c6-8d84-988f35f6b93d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2135.359944] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Acquired lock "refresh_cache-2e186217-c1e1-40c6-8d84-988f35f6b93d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2135.360162] env[61215]: DEBUG nova.network.neutron [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2135.364669] env[61215]: DEBUG nova.compute.manager [None req-eb8d599d-1968-4e6b-a515-d88704d582da tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: be5cf1b4-da97-4944-bb38-f10943576b8f] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2135.385613] env[61215]: DEBUG nova.network.neutron [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2135.393710] env[61215]: DEBUG nova.compute.manager [None req-eb8d599d-1968-4e6b-a515-d88704d582da tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: be5cf1b4-da97-4944-bb38-f10943576b8f] Instance disappeared before build. 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 2135.416845] env[61215]: DEBUG oslo_concurrency.lockutils [None req-eb8d599d-1968-4e6b-a515-d88704d582da tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "be5cf1b4-da97-4944-bb38-f10943576b8f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 241.972s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.425412] env[61215]: DEBUG nova.compute.manager [None req-8c624a34-09da-48ba-8dda-5403d0c5cee9 tempest-AttachVolumeShelveTestJSON-1147171237 tempest-AttachVolumeShelveTestJSON-1147171237-project-member] [instance: 3a7e93d3-cef2-4b3f-a20b-da373780e1c8] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2135.446674] env[61215]: DEBUG nova.network.neutron [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2135.448341] env[61215]: DEBUG nova.compute.manager [None req-8c624a34-09da-48ba-8dda-5403d0c5cee9 tempest-AttachVolumeShelveTestJSON-1147171237 tempest-AttachVolumeShelveTestJSON-1147171237-project-member] [instance: 3a7e93d3-cef2-4b3f-a20b-da373780e1c8] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 2135.454467] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Releasing lock "refresh_cache-2e186217-c1e1-40c6-8d84-988f35f6b93d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2135.454868] env[61215]: DEBUG nova.compute.manager [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2135.455075] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2135.455559] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ba9cdf7-ec28-4f09-87e6-86c62cc6afe6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.467394] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba81368-819d-4f4c-86f4-1430a8074eb7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.478263] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8c624a34-09da-48ba-8dda-5403d0c5cee9 tempest-AttachVolumeShelveTestJSON-1147171237 tempest-AttachVolumeShelveTestJSON-1147171237-project-member] Lock "3a7e93d3-cef2-4b3f-a20b-da373780e1c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 240.033s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.487587] env[61215]: DEBUG nova.compute.manager [None req-56d03437-a8f9-46fd-a343-b1168a0e2e07 tempest-InstanceActionsV221TestJSON-1482815884 tempest-InstanceActionsV221TestJSON-1482815884-project-member] [instance: 90f1ad20-b501-4f1e-95ff-1d428c51c242] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2135.498247] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2e186217-c1e1-40c6-8d84-988f35f6b93d could not be found. [ 2135.498435] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2135.498611] env[61215]: INFO nova.compute.manager [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2135.498848] env[61215]: DEBUG oslo.service.loopingcall [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2135.499244] env[61215]: DEBUG nova.compute.manager [-] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2135.499347] env[61215]: DEBUG nova.network.neutron [-] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2135.510660] env[61215]: DEBUG nova.compute.manager [None req-56d03437-a8f9-46fd-a343-b1168a0e2e07 tempest-InstanceActionsV221TestJSON-1482815884 tempest-InstanceActionsV221TestJSON-1482815884-project-member] [instance: 90f1ad20-b501-4f1e-95ff-1d428c51c242] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 2135.530632] env[61215]: DEBUG oslo_concurrency.lockutils [None req-56d03437-a8f9-46fd-a343-b1168a0e2e07 tempest-InstanceActionsV221TestJSON-1482815884 tempest-InstanceActionsV221TestJSON-1482815884-project-member] Lock "90f1ad20-b501-4f1e-95ff-1d428c51c242" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.737s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.541233] env[61215]: DEBUG nova.compute.manager [None req-58cf2928-028f-4efd-acf6-6aa48060bdd0 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 02e32086-8fe7-4def-ac71-7c4c43ee0f23] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2135.566701] env[61215]: DEBUG nova.compute.manager [None req-58cf2928-028f-4efd-acf6-6aa48060bdd0 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 02e32086-8fe7-4def-ac71-7c4c43ee0f23] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 2135.587392] env[61215]: DEBUG oslo_concurrency.lockutils [None req-58cf2928-028f-4efd-acf6-6aa48060bdd0 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "02e32086-8fe7-4def-ac71-7c4c43ee0f23" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.315s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.596387] env[61215]: DEBUG nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2135.651806] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2135.652073] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.653777] env[61215]: INFO nova.compute.claims [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2135.668720] env[61215]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61215) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2135.669947] env[61215]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2135.669947] env[61215]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-4e73a7cf-0866-4874-ab2a-9cce5d55c45f'] [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2135.669947] env[61215]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2135.669947] env[61215]: ERROR oslo.service.loopingcall [ 2135.671359] env[61215]: ERROR nova.compute.manager [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2135.702074] env[61215]: ERROR nova.compute.manager [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Traceback (most recent call last): [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] ret = obj(*args, **kwargs) [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] exception_handler_v20(status_code, error_body) [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] raise client_exc(message=error_message, [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Neutron server returns request_ids: ['req-4e73a7cf-0866-4874-ab2a-9cce5d55c45f'] [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] During handling of the above exception, another exception occurred: [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Traceback (most recent call last): [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self._delete_instance(context, instance, bdms) [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self._shutdown_instance(context, instance, bdms) [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self._try_deallocate_network(context, instance, requested_networks) [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] with excutils.save_and_reraise_exception(): [ 2135.702074] env[61215]: ERROR 
nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self.force_reraise() [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] raise self.value [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] _deallocate_network_with_retries() [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] return evt.wait() [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] result = hub.switch() [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] return self.greenlet.switch() [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] result = func(*self.args, **self.kw) [ 2135.702074] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] result = f(*args, **kwargs) [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self._deallocate_network( [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self.network_api.deallocate_for_instance( [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 
2e186217-c1e1-40c6-8d84-988f35f6b93d] data = neutron.list_ports(**search_opts) [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] ret = obj(*args, **kwargs) [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] return self.list('ports', self.ports_path, retrieve_all, [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] ret = obj(*args, **kwargs) [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] for r in self._pagination(collection, path, **params): [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] res = self.get(path, params=params) [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] ret = obj(*args, **kwargs) [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] return self.retry_request("GET", action, body=body, [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] ret = obj(*args, **kwargs) [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] return self.do_request(method, action, body=body, [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] ret = obj(*args, **kwargs) [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] self._handle_fault_response(status_code, replybody, resp) [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2135.703346] env[61215]: ERROR nova.compute.manager [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] [ 2135.730384] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Lock "2e186217-c1e1-40c6-8d84-988f35f6b93d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.375s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.731484] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "2e186217-c1e1-40c6-8d84-988f35f6b93d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 302.764s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.732197] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] During sync_power_state the instance has a pending task (deleting). Skip. [ 2135.732197] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "2e186217-c1e1-40c6-8d84-988f35f6b93d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.789114] env[61215]: INFO nova.compute.manager [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] [instance: 2e186217-c1e1-40c6-8d84-988f35f6b93d] Successfully reverted task state from None on failure for instance. [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server [None req-fd389b6d-29ff-40c2-a2fd-178785d6ff08 tempest-ServersAaction247Test-408738690 tempest-ServersAaction247Test-408738690-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-4e73a7cf-0866-4874-ab2a-9cce5d55c45f'] [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 2135.792666] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.794268] env[61215]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2135.794268] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2135.796267] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2135.796267] env[61215]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2135.796267] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2135.796267] env[61215]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2135.796267] env[61215]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2135.796267] env[61215]: ERROR oslo_messaging.rpc.server [ 2135.870704] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7240f09-0279-4fb6-a580-e9648c831b87 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.878541] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6e727c-3c3f-44ca-b9d4-dba593378d8f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.910466] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20c91811-60dd-4837-9b99-3fa84759ef4b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.917651] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c63feb-ca67-4eaf-b1fd-69a6b418cf9a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.930549] env[61215]: DEBUG nova.compute.provider_tree [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2135.938745] env[61215]: DEBUG nova.scheduler.client.report [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2135.955283] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.303s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.955771] env[61215]: DEBUG nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2135.989072] env[61215]: DEBUG nova.compute.utils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2135.989913] env[61215]: DEBUG nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2135.990104] env[61215]: DEBUG nova.network.neutron [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2136.000082] env[61215]: DEBUG nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2136.045621] env[61215]: DEBUG nova.policy [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '16f73546cd9449e6abe8212587203a95', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2acceb9d1de64b5a999ec1989ca85aeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2136.062134] env[61215]: DEBUG nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2136.088592] env[61215]: DEBUG nova.virt.hardware [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2136.088880] env[61215]: DEBUG nova.virt.hardware [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2136.088951] env[61215]: DEBUG nova.virt.hardware [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2136.089150] env[61215]: DEBUG nova.virt.hardware [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2136.089285] env[61215]: DEBUG nova.virt.hardware [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2136.089442] env[61215]: DEBUG nova.virt.hardware [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2136.089652] env[61215]: DEBUG nova.virt.hardware [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2136.089814] env[61215]: DEBUG nova.virt.hardware [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 
tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2136.089985] env[61215]: DEBUG nova.virt.hardware [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2136.090166] env[61215]: DEBUG nova.virt.hardware [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2136.090342] env[61215]: DEBUG nova.virt.hardware [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2136.091227] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95550bc5-70e0-4f38-ae75-ae0c11c11d91 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.099608] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8906882-4f61-4752-9124-ca036729c945 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.410920] env[61215]: DEBUG nova.network.neutron [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Successfully created port: a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2136.959384] env[61215]: DEBUG nova.compute.manager [req-3edff902-edb5-418a-824e-a51200ba9b00 req-a1bdb80e-f2a5-47f6-8a9e-a51838287bab service nova] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Received event network-vif-plugged-a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2136.959640] env[61215]: DEBUG oslo_concurrency.lockutils [req-3edff902-edb5-418a-824e-a51200ba9b00 req-a1bdb80e-f2a5-47f6-8a9e-a51838287bab service nova] Acquiring lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2136.959859] env[61215]: DEBUG oslo_concurrency.lockutils [req-3edff902-edb5-418a-824e-a51200ba9b00 req-a1bdb80e-f2a5-47f6-8a9e-a51838287bab service nova] Lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2136.960043] env[61215]: DEBUG oslo_concurrency.lockutils [req-3edff902-edb5-418a-824e-a51200ba9b00 
req-a1bdb80e-f2a5-47f6-8a9e-a51838287bab service nova] Lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2136.960272] env[61215]: DEBUG nova.compute.manager [req-3edff902-edb5-418a-824e-a51200ba9b00 req-a1bdb80e-f2a5-47f6-8a9e-a51838287bab service nova] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] No waiting events found dispatching network-vif-plugged-a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2136.960456] env[61215]: WARNING nova.compute.manager [req-3edff902-edb5-418a-824e-a51200ba9b00 req-a1bdb80e-f2a5-47f6-8a9e-a51838287bab service nova] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Received unexpected event network-vif-plugged-a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6 for instance with vm_state building and task_state spawning. [ 2137.092742] env[61215]: DEBUG nova.network.neutron [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Successfully updated port: a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2137.109120] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Acquiring lock "refresh_cache-fb5fb791-5f62-4717-8d8f-7d56ffda15be" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2137.109283] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Acquired lock "refresh_cache-fb5fb791-5f62-4717-8d8f-7d56ffda15be" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2137.109503] env[61215]: DEBUG nova.network.neutron [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2137.176720] env[61215]: DEBUG nova.network.neutron [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2137.354107] env[61215]: DEBUG nova.network.neutron [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Updating instance_info_cache with network_info: [{"id": "a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6", "address": "fa:16:3e:b4:0b:c8", "network": {"id": "e4c6e40c-817e-4595-a4ad-907f9c2c24ff", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-195782996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2acceb9d1de64b5a999ec1989ca85aeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bafe8721-91d4-4127-b215-d9e8e27947dc", "external-id": "nsx-vlan-transportzone-680", "segmentation_id": 680, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8bfb8bf-5f", "ovs_interfaceid": "a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2137.369619] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Releasing lock "refresh_cache-fb5fb791-5f62-4717-8d8f-7d56ffda15be" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2137.369895] env[61215]: DEBUG nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Instance network_info: |[{"id": "a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6", "address": "fa:16:3e:b4:0b:c8", "network": {"id": "e4c6e40c-817e-4595-a4ad-907f9c2c24ff", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-195782996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2acceb9d1de64b5a999ec1989ca85aeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bafe8721-91d4-4127-b215-d9e8e27947dc", "external-id": "nsx-vlan-transportzone-680", "segmentation_id": 680, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8bfb8bf-5f", "ovs_interfaceid": "a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2137.370314] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:0b:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bafe8721-91d4-4127-b215-d9e8e27947dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2137.377807] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Creating folder: Project (2acceb9d1de64b5a999ec1989ca85aeb). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2137.378317] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2f75f83-a5fe-4b19-bbb7-30de64cd322a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.389244] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Created folder: Project (2acceb9d1de64b5a999ec1989ca85aeb) in parent group-v352463. [ 2137.389422] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Creating folder: Instances. Parent ref: group-v352552. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2137.389630] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c193448-a310-48fe-baa0-6104c2ea59ef {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.398921] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Created folder: Instances in parent group-v352552. [ 2137.399151] env[61215]: DEBUG oslo.service.loopingcall [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2137.399325] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2137.399510] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6fb20cdf-b14f-49d7-add0-0775c5e30b30 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.418463] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2137.418463] env[61215]: value = "task-1690414" [ 2137.418463] env[61215]: _type = "Task" [ 2137.418463] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.425764] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690414, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.928732] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690414, 'name': CreateVM_Task} progress is 99%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.428942] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690414, 'name': CreateVM_Task} progress is 99%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.929099] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690414, 'name': CreateVM_Task, 'duration_secs': 1.305398} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2138.929313] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2138.930032] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2138.930276] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2138.930626] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2138.930902] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82bf06e5-4d8a-4401-9a7a-eee705059cef {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.935355] env[61215]: DEBUG oslo_vmware.api [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Waiting for the task: (returnval){ [ 2138.935355] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52862a2d-05bf-3ff5-f021-c417855b42ee" [ 2138.935355] env[61215]: _type = "Task" [ 2138.935355] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2138.943973] env[61215]: DEBUG oslo_vmware.api [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52862a2d-05bf-3ff5-f021-c417855b42ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.987202] env[61215]: DEBUG nova.compute.manager [req-7f41ad98-82c9-4eba-a681-debad6fbf2d4 req-dccc6486-a93c-4970-8ed1-ae7a9c0d4a9f service nova] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Received event network-changed-a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2138.987355] env[61215]: DEBUG nova.compute.manager [req-7f41ad98-82c9-4eba-a681-debad6fbf2d4 req-dccc6486-a93c-4970-8ed1-ae7a9c0d4a9f service nova] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Refreshing instance network info cache due to event network-changed-a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2138.987573] env[61215]: DEBUG oslo_concurrency.lockutils [req-7f41ad98-82c9-4eba-a681-debad6fbf2d4 req-dccc6486-a93c-4970-8ed1-ae7a9c0d4a9f service nova] Acquiring lock "refresh_cache-fb5fb791-5f62-4717-8d8f-7d56ffda15be" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2138.987721] env[61215]: DEBUG oslo_concurrency.lockutils [req-7f41ad98-82c9-4eba-a681-debad6fbf2d4 req-dccc6486-a93c-4970-8ed1-ae7a9c0d4a9f service nova] Acquired lock "refresh_cache-fb5fb791-5f62-4717-8d8f-7d56ffda15be" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2138.987887] env[61215]: DEBUG nova.network.neutron [req-7f41ad98-82c9-4eba-a681-debad6fbf2d4 req-dccc6486-a93c-4970-8ed1-ae7a9c0d4a9f service nova] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Refreshing network info cache for port a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2139.275742] env[61215]: DEBUG nova.network.neutron [req-7f41ad98-82c9-4eba-a681-debad6fbf2d4 req-dccc6486-a93c-4970-8ed1-ae7a9c0d4a9f service nova] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Updated VIF entry in instance network info cache for port a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2139.276145] env[61215]: DEBUG nova.network.neutron [req-7f41ad98-82c9-4eba-a681-debad6fbf2d4 req-dccc6486-a93c-4970-8ed1-ae7a9c0d4a9f service nova] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Updating instance_info_cache with network_info: [{"id": "a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6", "address": "fa:16:3e:b4:0b:c8", "network": {"id": "e4c6e40c-817e-4595-a4ad-907f9c2c24ff", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-195782996-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2acceb9d1de64b5a999ec1989ca85aeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bafe8721-91d4-4127-b215-d9e8e27947dc", "external-id": "nsx-vlan-transportzone-680", "segmentation_id": 680, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8bfb8bf-5f", "ovs_interfaceid": "a8bfb8bf-5f28-442e-86a9-bce1dbcbb9f6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2139.286421] env[61215]: DEBUG oslo_concurrency.lockutils [req-7f41ad98-82c9-4eba-a681-debad6fbf2d4 req-dccc6486-a93c-4970-8ed1-ae7a9c0d4a9f service nova] Releasing lock "refresh_cache-fb5fb791-5f62-4717-8d8f-7d56ffda15be" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2139.446411] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2139.446674] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2139.446975] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2149.798352] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2e5fe318-670c-4983-b735-75381ac4a77f tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Acquiring lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2154.657868] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2160.656085] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2161.655110] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2161.655346] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2161.655507] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2165.516054] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2165.516054] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2165.516054] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2165.531338] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2165.531497] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2165.531632] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2165.531760] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2165.531885] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2165.532055] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2165.532197] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2165.532321] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2165.532442] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2165.532559] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2165.532678] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2165.533227] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2165.533377] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2165.535449] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2165.544882] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2165.545154] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.545341] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.545498] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2165.546569] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46bd3065-cc1a-49ec-a17d-3d1228ad2012 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.555395] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10860c3-baeb-4146-8375-56aa7373963c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.569735] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af94ae7a-493f-4954-b247-02564e70b004 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.576481] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2549289f-b7b3-4a8b-bb32-df79c5be4cb4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.605148] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181289MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2165.605251] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2165.605458] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.684029] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.684203] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c233ab81-232d-49be-a176-bf846f0d8cc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.684342] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bb56c470-9f85-44b1-b1ec-f44236e9de51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.684468] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 38fe96cf-e570-4c0e-af6f-2f199efc06ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.684591] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0d609df2-621c-456f-b8ce-a209e9052153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.684712] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 66420497-c0f6-4f1d-86ee-23d53400e325 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.684831] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 59d93243-c15c-4554-863b-779d94b3d858 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.684948] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f3a3a510-a085-4388-b49d-b4371095b436 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.685095] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.685229] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb5fb791-5f62-4717-8d8f-7d56ffda15be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2165.698932] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 455e7272-f099-496f-b929-ed6fa9a0ab44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2165.709252] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 9eacbeb5-b918-4b0f-82f4-d06a037803df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2165.719524] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49ab8e42-2da3-474b-b283-9d31b089fd76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2165.729193] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e4fdc9ea-dad8-4422-b110-5c5aa4e25f3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2165.729418] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2165.729568] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2165.884408] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf8f1f9-6df3-4daf-b366-5fed8a8210b7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.891689] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40dbceb5-0a30-4e49-bed3-e5ef295f87dd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.921385] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088bccf3-499a-4f3a-8381-eb2d17b3fa86 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.928412] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b447a09a-f983-46c2-859a-a062e664c072 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.941522] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2165.950392] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2165.965916] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2165.965916] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.360s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2166.086700] env[61215]: DEBUG oslo_service.periodic_task [None 
req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2168.649831] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2183.651267] env[61215]: WARNING oslo_vmware.rw_handles [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2183.651267] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2183.651267] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2183.651267] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2183.651267] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2183.651267] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2183.651267] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2183.651267] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2183.651267] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2183.651267] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2183.651267] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2183.651267] env[61215]: ERROR oslo_vmware.rw_handles [ 2183.652321] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/ee025c82-e6c3-4f5a-b605-c75d630378d4/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2183.653805] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2183.654069] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Copying Virtual Disk [datastore1] vmware_temp/ee025c82-e6c3-4f5a-b605-c75d630378d4/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/ee025c82-e6c3-4f5a-b605-c75d630378d4/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 
2183.654400] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7736cd0e-c50a-4694-8fcf-50bf4bdea7e3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.662490] env[61215]: DEBUG oslo_vmware.api [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Waiting for the task: (returnval){ [ 2183.662490] env[61215]: value = "task-1690415" [ 2183.662490] env[61215]: _type = "Task" [ 2183.662490] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.670614] env[61215]: DEBUG oslo_vmware.api [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Task: {'id': task-1690415, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.173091] env[61215]: DEBUG oslo_vmware.exceptions [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2184.173390] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2184.173989] env[61215]: ERROR nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2184.173989] env[61215]: Faults: ['InvalidArgument'] [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Traceback (most recent call last): [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] yield resources [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] self.driver.spawn(context, instance, image_meta, [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2184.173989] env[61215]: ERROR nova.compute.manager 
[instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] self._fetch_image_if_missing(context, vi) [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] image_cache(vi, tmp_image_ds_loc) [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] vm_util.copy_virtual_disk( [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] session._wait_for_task(vmdk_copy_task) [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] return self.wait_for_task(task_ref) [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] return evt.wait() [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] result = hub.switch() [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] return self.greenlet.switch() [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] self.f(*self.args, **self.kw) [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] raise exceptions.translate_fault(task_info.error) [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: 
a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Faults: ['InvalidArgument'] [ 2184.173989] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] [ 2184.175144] env[61215]: INFO nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Terminating instance [ 2184.176074] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2184.176301] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2184.176546] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79cfae92-6e8e-4c47-998d-ff712d6539b7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.178935] env[61215]: DEBUG nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2184.179149] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2184.179877] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe011ddf-22d0-4a5e-8a14-1fc5d9ba4811 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.186906] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2184.187178] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95144320-4446-404f-aafb-e5deedbf50f5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.189415] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2184.189601] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2184.190560] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07a9a669-243e-4f4b-9078-4b4e88dccda9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.195556] env[61215]: DEBUG oslo_vmware.api [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Waiting for the task: (returnval){ [ 2184.195556] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5269dc3a-ce16-109e-f3b5-d889f9f73671" [ 2184.195556] env[61215]: _type = "Task" [ 2184.195556] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.208122] env[61215]: DEBUG oslo_vmware.api [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5269dc3a-ce16-109e-f3b5-d889f9f73671, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.706536] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2184.706851] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Creating directory with path [datastore1] vmware_temp/c643465e-c0fb-432b-8e5f-deb14d3e8a24/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2184.707095] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7f8c0e0-2dc5-4766-ae52-a67176e1ebf9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.727183] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Created directory with path [datastore1] vmware_temp/c643465e-c0fb-432b-8e5f-deb14d3e8a24/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2184.727411] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Fetch image to [datastore1] vmware_temp/c643465e-c0fb-432b-8e5f-deb14d3e8a24/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2184.727632] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/c643465e-c0fb-432b-8e5f-deb14d3e8a24/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2184.728385] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf946d26-e68c-434e-9e2b-9b84aaca1db7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.734767] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c3e716e-e0da-4d34-a753-357e5bff3960 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.743758] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae38f2b-3078-427c-8019-38509f76fc7b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.774317] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1cc14942-c296-4891-b33f-178dc81ea380 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.779543] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1dfb7de3-b480-4404-8af0-d55b87f17779 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.800506] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2184.848186] env[61215]: DEBUG oslo_vmware.rw_handles [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c643465e-c0fb-432b-8e5f-deb14d3e8a24/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2184.910278] env[61215]: DEBUG oslo_vmware.rw_handles [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2184.910481] env[61215]: DEBUG oslo_vmware.rw_handles [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c643465e-c0fb-432b-8e5f-deb14d3e8a24/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2188.189334] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2188.189722] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2188.189762] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Deleting the datastore file [datastore1] a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2188.190043] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cfc3ec7a-4c6d-483e-9860-8fd5d4133206 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.196529] env[61215]: DEBUG oslo_vmware.api [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Waiting for the task: (returnval){ [ 2188.196529] env[61215]: value = "task-1690417" [ 2188.196529] env[61215]: _type = "Task" [ 2188.196529] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.203993] env[61215]: DEBUG oslo_vmware.api [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Task: {'id': task-1690417, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.706454] env[61215]: DEBUG oslo_vmware.api [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Task: {'id': task-1690417, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063232} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.706737] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2188.706928] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2188.707161] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2188.707300] env[61215]: INFO nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Took 4.53 seconds to destroy the instance on the hypervisor. [ 2188.710078] env[61215]: DEBUG nova.compute.claims [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2188.710206] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.710425] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.902944] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d6f010-c0a4-4076-804d-6bd5b689ed06 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.910220] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30571d00-f832-4d3b-9f81-67a0fff2f239 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.939476] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b39740-51aa-40b2-9340-fdaca872579f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.946108] 
env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5704f1ec-487b-4301-88ff-dcf0b55b0264 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.958969] env[61215]: DEBUG nova.compute.provider_tree [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2188.967859] env[61215]: DEBUG nova.scheduler.client.report [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2188.981667] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.271s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.982316] env[61215]: ERROR nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2188.982316] env[61215]: Faults: ['InvalidArgument'] [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Traceback (most recent call last): [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] self.driver.spawn(context, instance, image_meta, [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] self._fetch_image_if_missing(context, vi) [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] image_cache(vi, tmp_image_ds_loc) [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] vm_util.copy_virtual_disk( [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] session._wait_for_task(vmdk_copy_task) [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] return self.wait_for_task(task_ref) [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] return evt.wait() [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] result = hub.switch() [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] return self.greenlet.switch() [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] self.f(*self.args, **self.kw) [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] raise exceptions.translate_fault(task_info.error) [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Faults: ['InvalidArgument'] [ 2188.982316] env[61215]: ERROR nova.compute.manager [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] [ 2188.984101] env[61215]: DEBUG nova.compute.utils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: 
a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2188.984938] env[61215]: DEBUG nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Build of instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 was re-scheduled: A specified parameter was not correct: fileType [ 2188.984938] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2188.985433] env[61215]: DEBUG nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2188.985684] env[61215]: DEBUG nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2188.985941] env[61215]: DEBUG nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2188.986205] env[61215]: DEBUG nova.network.neutron [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2189.389371] env[61215]: DEBUG nova.network.neutron [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2189.408860] env[61215]: INFO nova.compute.manager [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Took 0.42 seconds to deallocate network for instance. 
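
[editor's note] The failure above surfaces through oslo.vmware's task-polling loop: wait_for_task parks the caller on an event while _poll_task checks task state and, when the task errors, raises an exception translated from the server fault list. The earlier "Fault InvalidArgument not matched" record shows that no specific exception class is registered for InvalidArgument, so the generic VimFaultException is raised instead. A minimal, self-contained sketch of that pattern follows; the task_info callable and the plain sleep loop are illustrative stand-ins, not oslo.vmware's actual API, which uses a looping call on an eventlet event.

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(poll_task_info, interval=0.5):
        """Poll a task until it finishes; raise a translated fault on error.

        poll_task_info is a hypothetical callable returning a dict like
        {'state': 'running'|'success'|'error', 'result': ...,
         'faults': [...], 'message': ...}.
        """
        while True:
            info = poll_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # Like exceptions.translate_fault(): with no specific class
                # matched for the fault name, fall back to the generic
                # VimFaultException, as seen in the traceback above.
                raise VimFaultException(info.get('faults', []),
                                        info.get('message', ''))
            time.sleep(interval)

    # Example: a task that fails the same way CopyVirtualDisk_Task did.
    states = iter([
        {'state': 'running'},
        {'state': 'error', 'faults': ['InvalidArgument'],
         'message': 'A specified parameter was not correct: fileType'},
    ])
    try:
        wait_for_task(lambda: next(states), interval=0)
    except VimFaultException as exc:
        print(exc, exc.fault_list)
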
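[editor's note] The "Inventory has not changed" records carry the placement inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5. The usable capacity of each resource class follows the standard placement rule, (total - reserved) * allocation_ratio. A quick check against the numbers logged above (the dict below copies the relevant fields from those records):

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
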
[ 2189.508143] env[61215]: INFO nova.scheduler.client.report [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Deleted allocations for instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 [ 2189.531726] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9f46369d-0169-4343-88d9-b3c1466d3342 tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 642.463s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.533091] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 446.676s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.533326] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquiring lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.533621] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.533714] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.536457] env[61215]: INFO nova.compute.manager [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Terminating instance [ 2189.538054] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquiring lock "refresh_cache-a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2189.538920] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 
tempest-ServersV294TestFqdnHostnames-760857342-project-member] Acquired lock "refresh_cache-a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2189.538920] env[61215]: DEBUG nova.network.neutron [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2189.555097] env[61215]: DEBUG nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2189.566158] env[61215]: DEBUG nova.network.neutron [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2189.618656] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.618944] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.620597] env[61215]: INFO nova.compute.claims [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2189.749132] env[61215]: DEBUG nova.network.neutron [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2189.761025] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Releasing lock "refresh_cache-a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2189.761025] env[61215]: DEBUG nova.compute.manager [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Start destroying the instance on the 
hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2189.761025] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2189.761025] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b1e6b57e-742a-4eaf-860c-263910d1ae9e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.768350] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7077ffcb-8359-47d5-a0c5-711ff3f670f9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.801489] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8 could not be found. [ 2189.801700] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2189.802288] env[61215]: INFO nova.compute.manager [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2189.802288] env[61215]: DEBUG oslo.service.loopingcall [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2189.802482] env[61215]: DEBUG nova.compute.manager [-] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2189.802579] env[61215]: DEBUG nova.network.neutron [-] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2189.818786] env[61215]: DEBUG nova.network.neutron [-] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2189.827301] env[61215]: DEBUG nova.network.neutron [-] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2189.835714] env[61215]: INFO nova.compute.manager [-] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] Took 0.03 seconds to deallocate network for instance. [ 2189.859574] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ee3c811-dc31-49c3-8d1b-32f900d3aacb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.867012] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e5d1d7-709c-4a9e-8091-2b53e28cc095 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.898481] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88abeccf-05cb-4591-8d02-1effc52f8afa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.909012] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb16f4e-96aa-4ecc-9d26-948c870847df {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.924669] env[61215]: DEBUG nova.compute.provider_tree [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2189.934957] env[61215]: DEBUG nova.scheduler.client.report [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2189.956343] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.337s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.956863] env[61215]: DEBUG nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2189.966987] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3d8ad015-c2e3-477c-939f-3a212c6ee8fe tempest-ServersV294TestFqdnHostnames-760857342 tempest-ServersV294TestFqdnHostnames-760857342-project-member] Lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.434s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.968054] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 357.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.968054] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8] During sync_power_state the instance has a pending task (deleting). Skip. [ 2189.968223] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "a77e3dfa-4e55-4eab-9aac-e1f1fb1b4ef8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.990997] env[61215]: DEBUG nova.compute.utils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2189.992260] env[61215]: DEBUG nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2189.992433] env[61215]: DEBUG nova.network.neutron [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2190.000035] env[61215]: DEBUG nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Start building block device mappings for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2190.045134] env[61215]: DEBUG nova.policy [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98494b2f0fff4f71a4331d15cff87623', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0bbdc1f3b2144aed9baeeccd509620b0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2190.063404] env[61215]: DEBUG nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2190.087371] env[61215]: DEBUG nova.virt.hardware [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2190.087607] env[61215]: DEBUG nova.virt.hardware [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2190.087767] env[61215]: DEBUG nova.virt.hardware [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2190.087949] env[61215]: DEBUG nova.virt.hardware [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2190.088111] env[61215]: DEBUG nova.virt.hardware [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2190.088263] env[61215]: DEBUG nova.virt.hardware [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 
tempest-ServersTestJSON-743167621-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2190.088469] env[61215]: DEBUG nova.virt.hardware [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2190.088632] env[61215]: DEBUG nova.virt.hardware [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2190.088801] env[61215]: DEBUG nova.virt.hardware [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2190.088965] env[61215]: DEBUG nova.virt.hardware [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2190.089163] env[61215]: DEBUG nova.virt.hardware [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2190.090017] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecb4bf3-ad0d-407f-a409-57e268f958e1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.098055] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b2580f-84b7-4a85-9576-cb51097d788a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.353286] env[61215]: DEBUG nova.network.neutron [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Successfully created port: 191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2191.405226] env[61215]: DEBUG nova.network.neutron [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Successfully updated port: 191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2191.420855] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Acquiring lock "refresh_cache-455e7272-f099-496f-b929-ed6fa9a0ab44" {{(pid=61215) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2191.420992] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Acquired lock "refresh_cache-455e7272-f099-496f-b929-ed6fa9a0ab44" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2191.421174] env[61215]: DEBUG nova.network.neutron [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2191.438737] env[61215]: DEBUG nova.compute.manager [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Received event network-vif-plugged-191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2191.438737] env[61215]: DEBUG oslo_concurrency.lockutils [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] Acquiring lock "455e7272-f099-496f-b929-ed6fa9a0ab44-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2191.438737] env[61215]: DEBUG oslo_concurrency.lockutils [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] Lock "455e7272-f099-496f-b929-ed6fa9a0ab44-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2191.438737] env[61215]: DEBUG oslo_concurrency.lockutils [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] Lock "455e7272-f099-496f-b929-ed6fa9a0ab44-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2191.438737] env[61215]: DEBUG nova.compute.manager [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] No waiting events found dispatching network-vif-plugged-191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2191.438737] env[61215]: WARNING nova.compute.manager [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Received unexpected event network-vif-plugged-191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3 for instance with vm_state building and task_state spawning. 
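
[editor's note] The network-vif-plugged records above show the external-event path: Neutron notifies Nova, which looks up a waiter registered for the (instance, event) pair under a per-instance events lock; if no waiter is registered, the "No waiting events found" and "Received unexpected event" records are emitted, exactly as here, because the instance is still building. A toy sketch of that registry pattern, using threading.Event where Nova's InstanceEvents uses eventlet primitives; names are illustrative:

    import threading

    class InstanceEvents:
        """Toy registry mapping (instance_uuid, event_tag) -> waiter."""

        def __init__(self):
            self._waiters = {}
            self._lock = threading.Lock()

        def prepare(self, instance_uuid, event_tag):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_tag)] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_tag):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_tag), None)

    events = InstanceEvents()
    uuid = '455e7272-f099-496f-b929-ed6fa9a0ab44'
    tag = 'network-vif-plugged-191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3'

    # Dispatcher side: an event arrives with no registered waiter.
    waiter = events.pop_instance_event(uuid, tag)
    if waiter is None:
        print('No waiting events found; unexpected event')  # cf. the WARNING above
    else:
        waiter.set()  # wake the greenthread blocked on this event
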
[ 2191.438737] env[61215]: DEBUG nova.compute.manager [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Received event network-changed-191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2191.439050] env[61215]: DEBUG nova.compute.manager [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Refreshing instance network info cache due to event network-changed-191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2191.439118] env[61215]: DEBUG oslo_concurrency.lockutils [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] Acquiring lock "refresh_cache-455e7272-f099-496f-b929-ed6fa9a0ab44" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2191.473486] env[61215]: DEBUG nova.network.neutron [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2191.651408] env[61215]: DEBUG nova.network.neutron [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Updating instance_info_cache with network_info: [{"id": "191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3", "address": "fa:16:3e:64:6f:97", "network": {"id": "b4b1a623-1b6c-4072-9048-f30715dcc3c8", "bridge": "br-int", "label": "tempest-ServersTestJSON-440786615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbdc1f3b2144aed9baeeccd509620b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap191b5edc-3b", "ovs_interfaceid": "191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2191.663275] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Releasing lock "refresh_cache-455e7272-f099-496f-b929-ed6fa9a0ab44" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2191.663563] env[61215]: DEBUG nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 
455e7272-f099-496f-b929-ed6fa9a0ab44] Instance network_info: |[{"id": "191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3", "address": "fa:16:3e:64:6f:97", "network": {"id": "b4b1a623-1b6c-4072-9048-f30715dcc3c8", "bridge": "br-int", "label": "tempest-ServersTestJSON-440786615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbdc1f3b2144aed9baeeccd509620b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap191b5edc-3b", "ovs_interfaceid": "191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2191.663865] env[61215]: DEBUG oslo_concurrency.lockutils [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] Acquired lock "refresh_cache-455e7272-f099-496f-b929-ed6fa9a0ab44" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2191.664060] env[61215]: DEBUG nova.network.neutron [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Refreshing network info cache for port 191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2191.665196] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:6f:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2191.673399] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Creating folder: Project (0bbdc1f3b2144aed9baeeccd509620b0). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2191.676417] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3108366f-f790-41a2-8cad-e5a8b9c65262 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.687476] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Created folder: Project (0bbdc1f3b2144aed9baeeccd509620b0) in parent group-v352463. 
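
[editor's note] The two Folder.CreateFolder invocations that follow build the per-tenant folder layout in vCenter: a "Project (<tenant-id>)" folder under the parent group, then an "Instances" folder beneath it. A rough sketch of that idempotent create-or-reuse pattern is below, assuming a hypothetical session wrapper (the real logic lives in nova.virt.vmwareapi.vm_util.create_folder, which similarly tolerates the folder already existing, as the "Extension ... already exists"-style handling elsewhere in this log suggests).

    class DuplicateName(Exception):
        """Stand-in for the vSphere DuplicateName fault."""

    def create_folder(session, parent_ref, name):
        # session.invoke_api / session.find_child are hypothetical helpers,
        # not the literal oslo.vmware API surface.
        try:
            return session.invoke_api("CreateFolder", parent_ref, name=name)
        except DuplicateName:
            # Folder already exists; reuse it instead of failing the build.
            return session.find_child(parent_ref, name)

    def ensure_instance_folder(session, root_ref, tenant_id):
        project = create_folder(session, root_ref, "Project (%s)" % tenant_id)
        return create_folder(session, project, "Instances")

With this shape, concurrent builds for the same tenant converge on the same "Project (...)/Instances" folder pair, which matches the log: the Project folder is created once under group-v352463 and the Instances folder under the newly returned group-v352555.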
[ 2191.687669] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Creating folder: Instances. Parent ref: group-v352555. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2191.687894] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62a1e505-c9db-47c6-8205-93dea52138ab {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.697007] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Created folder: Instances in parent group-v352555. [ 2191.697297] env[61215]: DEBUG oslo.service.loopingcall [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2191.697798] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2191.697798] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ec40752-b2d8-4897-95d8-cfb3ce667f0a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.717150] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2191.717150] env[61215]: value = "task-1690420" [ 2191.717150] env[61215]: _type = "Task" [ 2191.717150] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.726826] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690420, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.033770] env[61215]: DEBUG nova.network.neutron [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Updated VIF entry in instance network info cache for port 191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2192.034189] env[61215]: DEBUG nova.network.neutron [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Updating instance_info_cache with network_info: [{"id": "191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3", "address": "fa:16:3e:64:6f:97", "network": {"id": "b4b1a623-1b6c-4072-9048-f30715dcc3c8", "bridge": "br-int", "label": "tempest-ServersTestJSON-440786615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0bbdc1f3b2144aed9baeeccd509620b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap191b5edc-3b", "ovs_interfaceid": "191b5edc-3b2a-4be6-82a6-7a9b1f37ebd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2192.044130] env[61215]: DEBUG oslo_concurrency.lockutils [req-42c96895-fe66-430d-a6d8-792fec361999 req-dfe921e2-85c5-426e-8cb4-f7c62936e0c2 service nova] Releasing lock "refresh_cache-455e7272-f099-496f-b929-ed6fa9a0ab44" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2192.229991] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690420, 'name': CreateVM_Task, 'duration_secs': 0.30522} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2192.230195] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2192.230883] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2192.231065] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2192.231417] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2192.231677] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5802b688-2e7c-4e4e-be3d-78a67f369f96 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.236533] env[61215]: DEBUG oslo_vmware.api [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Waiting for the task: (returnval){ [ 2192.236533] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52c43075-38be-43df-06af-064be68d57b4" [ 2192.236533] env[61215]: _type = "Task" [ 2192.236533] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2192.243640] env[61215]: DEBUG oslo_vmware.api [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52c43075-38be-43df-06af-064be68d57b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.746583] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2192.746884] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2192.747073] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2197.529893] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a6183a1f-6ed6-4aa6-ba60-00f69d20af84 tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Acquiring lock "455e7272-f099-496f-b929-ed6fa9a0ab44" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2215.675250] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2221.653664] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.655399] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.655758] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2222.655758] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2224.654599] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2224.668021] 
env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2224.668179] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2224.668353] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2224.668512] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2224.669620] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b62f629-11ae-49d7-a89a-d6921148a528 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.678550] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf83809-73e1-4ec3-a708-4cf8f79b99dc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.692429] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8e4c60-d28d-4a9a-a878-269fe159cd81 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.698720] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c93b066-374b-4661-8714-4311b78f31b1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.726984] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181303MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2224.727098] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2224.727291] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2224.802395] env[61215]: DEBUG 
nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance c233ab81-232d-49be-a176-bf846f0d8cc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.802551] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bb56c470-9f85-44b1-b1ec-f44236e9de51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.802682] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 38fe96cf-e570-4c0e-af6f-2f199efc06ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.802837] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0d609df2-621c-456f-b8ce-a209e9052153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.802956] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 66420497-c0f6-4f1d-86ee-23d53400e325 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.803086] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 59d93243-c15c-4554-863b-779d94b3d858 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.803204] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f3a3a510-a085-4388-b49d-b4371095b436 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.803318] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.803430] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb5fb791-5f62-4717-8d8f-7d56ffda15be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.803543] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 455e7272-f099-496f-b929-ed6fa9a0ab44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2224.813694] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 9eacbeb5-b918-4b0f-82f4-d06a037803df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2224.823500] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49ab8e42-2da3-474b-b283-9d31b089fd76 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2224.832179] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e4fdc9ea-dad8-4422-b110-5c5aa4e25f3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2224.832381] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2224.832524] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2224.975418] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a99f86a-ac37-42ca-bc40-3af22789a645 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.982903] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ef60cc-973d-44cc-b293-87015f97c9fa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.011880] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54363bf2-eaef-4717-a407-9badc5c1630e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.018953] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1264e3c-4278-410a-85fd-15192617e419 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.031633] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2225.040319] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2225.052999] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2225.053200] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.326s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2227.053640] env[61215]: DEBUG oslo_service.periodic_task [None 
req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2227.053991] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2227.053991] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2227.073969] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2227.074162] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2227.074266] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2227.074393] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2227.074517] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2227.074640] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2227.074802] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2227.074920] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2227.075055] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2227.075178] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2227.075299] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2227.075812] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2227.076041] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2227.076185] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2232.306623] env[61215]: WARNING oslo_vmware.rw_handles [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2232.306623] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2232.306623] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2232.306623] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2232.306623] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2232.306623] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2232.306623] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2232.306623] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2232.306623] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2232.306623] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2232.306623] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2232.306623] env[61215]: ERROR oslo_vmware.rw_handles [ 2232.307218] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/c643465e-c0fb-432b-8e5f-deb14d3e8a24/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 
2232.309337] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2232.309650] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Copying Virtual Disk [datastore1] vmware_temp/c643465e-c0fb-432b-8e5f-deb14d3e8a24/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/c643465e-c0fb-432b-8e5f-deb14d3e8a24/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2232.309979] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-abe74f00-de4a-4c1f-b097-a3af869d0e3a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.319095] env[61215]: DEBUG oslo_vmware.api [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Waiting for the task: (returnval){ [ 2232.319095] env[61215]: value = "task-1690421" [ 2232.319095] env[61215]: _type = "Task" [ 2232.319095] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2232.326942] env[61215]: DEBUG oslo_vmware.api [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Task: {'id': task-1690421, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2232.829785] env[61215]: DEBUG oslo_vmware.exceptions [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Fault InvalidArgument not matched. 
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2232.831794] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2232.831794] env[61215]: ERROR nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2232.831794] env[61215]: Faults: ['InvalidArgument'] [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Traceback (most recent call last): [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] yield resources [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] self.driver.spawn(context, instance, image_meta, [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] self._fetch_image_if_missing(context, vi) [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] image_cache(vi, tmp_image_ds_loc) [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] vm_util.copy_virtual_disk( [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] session._wait_for_task(vmdk_copy_task) [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] return self.wait_for_task(task_ref) [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] return evt.wait() [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] result = hub.switch() [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] return self.greenlet.switch() [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] self.f(*self.args, **self.kw) [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] raise exceptions.translate_fault(task_info.error) [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Faults: ['InvalidArgument'] [ 2232.831794] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] [ 2232.831794] env[61215]: INFO nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Terminating instance [ 2232.832969] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2232.832969] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2232.833970] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6bf5ae6d-6dc1-4e1c-ad63-5b179a453b9c {{(pid=61215) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.836745] env[61215]: DEBUG nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2232.836936] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2232.837656] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ff9714-3209-4ee7-8f3e-079e87349c2e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.843845] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2232.844059] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c3ede76-7136-47c0-b75d-0e24d5962e3f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.846339] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2232.846509] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2232.847196] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb0b107f-2093-4fe9-81c9-d91efdb1dcb3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.852016] env[61215]: DEBUG oslo_vmware.api [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Waiting for the task: (returnval){ [ 2232.852016] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b07b89-c53b-6d18-7daa-821980455918" [ 2232.852016] env[61215]: _type = "Task" [ 2232.852016] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2232.858728] env[61215]: DEBUG oslo_vmware.api [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b07b89-c53b-6d18-7daa-821980455918, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.362862] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2233.363195] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Creating directory with path [datastore1] vmware_temp/6e09cddf-f381-4605-8897-b03d73cdd783/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2233.363903] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13692923-59fe-4b7c-b55f-9c200f83a2a5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.384056] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Created directory with path [datastore1] vmware_temp/6e09cddf-f381-4605-8897-b03d73cdd783/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2233.384282] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Fetch image to [datastore1] vmware_temp/6e09cddf-f381-4605-8897-b03d73cdd783/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2233.384461] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/6e09cddf-f381-4605-8897-b03d73cdd783/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2233.385302] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b0d012-9d08-4c85-8e7f-1140799fc3a4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.392417] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea94f12-c1f2-47f3-83c9-e3b067d7e298 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.402037] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb921508-7151-44bf-b883-9f3dfffa7814 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.435461] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5086c57-5275-4b66-be71-ee511735bd2a {{(pid=61215) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.441233] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6f411580-94aa-4db8-984f-a958b44cc26b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.461543] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2233.509939] env[61215]: DEBUG oslo_vmware.rw_handles [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6e09cddf-f381-4605-8897-b03d73cdd783/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2233.570746] env[61215]: DEBUG oslo_vmware.rw_handles [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2233.570993] env[61215]: DEBUG oslo_vmware.rw_handles [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6e09cddf-f381-4605-8897-b03d73cdd783/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2234.192870] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2234.193077] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2234.193267] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Deleting the datastore file [datastore1] c233ab81-232d-49be-a176-bf846f0d8cc3 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2234.193529] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e14f42c-7760-412b-a86c-f4d8220ab1b8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.199732] env[61215]: DEBUG oslo_vmware.api [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Waiting for the task: (returnval){ [ 2234.199732] env[61215]: value = "task-1690423" [ 2234.199732] env[61215]: _type = "Task" [ 2234.199732] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2234.207350] env[61215]: DEBUG oslo_vmware.api [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Task: {'id': task-1690423, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2234.710253] env[61215]: DEBUG oslo_vmware.api [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Task: {'id': task-1690423, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.0686} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2234.710605] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2234.710792] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2234.710975] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2234.711171] env[61215]: INFO nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Took 1.87 seconds to destroy the instance on the hypervisor. [ 2234.715385] env[61215]: DEBUG nova.compute.claims [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2234.715535] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2234.715759] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.907188] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38179893-9754-460f-82a6-2c3605d7566a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.914148] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c674b92-b320-4971-a5e7-16aa2b5f3e39 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.942945] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e20d2b-a442-47b9-aa95-25e8fc0b1464 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.949689] env[61215]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31233db4-d6a0-4d22-91b8-10febc165799 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.963178] env[61215]: DEBUG nova.compute.provider_tree [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2234.971369] env[61215]: DEBUG nova.scheduler.client.report [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2234.984221] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.268s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2234.984728] env[61215]: ERROR nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2234.984728] env[61215]: Faults: ['InvalidArgument'] [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Traceback (most recent call last): [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] self.driver.spawn(context, instance, image_meta, [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] self._fetch_image_if_missing(context, vi) [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 
2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] image_cache(vi, tmp_image_ds_loc) [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] vm_util.copy_virtual_disk( [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] session._wait_for_task(vmdk_copy_task) [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] return self.wait_for_task(task_ref) [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] return evt.wait() [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] result = hub.switch() [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] return self.greenlet.switch() [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] self.f(*self.args, **self.kw) [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] raise exceptions.translate_fault(task_info.error) [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Faults: ['InvalidArgument'] [ 2234.984728] env[61215]: ERROR nova.compute.manager [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] [ 2234.985598] env[61215]: DEBUG nova.compute.utils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] VimFaultException {{(pid=61215) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 2234.986804] env[61215]: DEBUG nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Build of instance c233ab81-232d-49be-a176-bf846f0d8cc3 was re-scheduled: A specified parameter was not correct: fileType [ 2234.986804] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2234.987192] env[61215]: DEBUG nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2234.987366] env[61215]: DEBUG nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2234.987538] env[61215]: DEBUG nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2234.987702] env[61215]: DEBUG nova.network.neutron [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2235.409356] env[61215]: DEBUG nova.network.neutron [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2235.419392] env[61215]: INFO nova.compute.manager [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Took 0.43 seconds to deallocate network for instance. 
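
Editor's note: the traceback above ends in oslo_vmware's task poller, which loops on the vCenter task state and converts a task-level error into a typed exception (here the `InvalidArgument` fault on `fileType` raised from `CopyVirtualDisk_Task`). The following is a minimal sketch of that polling pattern, not oslo.vmware's or Nova's actual code: `get_task_info`, the `info` dict layout, and the local `VimFaultException` class are illustrative stand-ins for the real `oslo_vmware.api._poll_task` / `exceptions.translate_fault` machinery.

```python
import time


class VimFaultException(Exception):
    """Local stand-in for oslo_vmware.exceptions.VimFaultException."""

    def __init__(self, fault_list, message):
        super().__init__(message)
        self.fault_list = fault_list  # e.g. ['InvalidArgument']


def wait_for_task(get_task_info, task_ref, interval=0.5):
    """Poll a vCenter task until it reaches a terminal state.

    Mirrors the loop behind the 'Task: {...} progress is N%' and
    'completed successfully' lines in the log above.
    """
    while True:
        info = get_task_info(task_ref)  # hypothetical PropertyCollector read
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # oslo.vmware translates the task fault here; an InvalidArgument
            # fault on fileType surfaces exactly like the traceback above.
            raise VimFaultException(info.get("faults", []), info["message"])
        time.sleep(interval)  # poll cadence between 'progress is N%' lines
```

When the exception propagates out of `spawn()`, the compute manager aborts the resource claim and re-schedules the build, which is the sequence of "Aborting claim", "Failed to build and run instance", and "was re-scheduled" records visible above.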
[ 2235.538856] env[61215]: INFO nova.scheduler.client.report [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Deleted allocations for instance c233ab81-232d-49be-a176-bf846f0d8cc3 [ 2235.560677] env[61215]: DEBUG oslo_concurrency.lockutils [None req-38584cb7-daa7-4ce1-88a3-c18ace199f27 tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Lock "c233ab81-232d-49be-a176-bf846f0d8cc3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.299s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2235.562384] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d3b902a4-d4d7-4d6e-88a3-44a06ffa516e tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Lock "c233ab81-232d-49be-a176-bf846f0d8cc3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.656s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2235.562735] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d3b902a4-d4d7-4d6e-88a3-44a06ffa516e tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Acquiring lock "c233ab81-232d-49be-a176-bf846f0d8cc3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2235.562999] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d3b902a4-d4d7-4d6e-88a3-44a06ffa516e tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Lock "c233ab81-232d-49be-a176-bf846f0d8cc3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2235.563225] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d3b902a4-d4d7-4d6e-88a3-44a06ffa516e tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Lock "c233ab81-232d-49be-a176-bf846f0d8cc3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2235.566448] env[61215]: INFO nova.compute.manager [None req-d3b902a4-d4d7-4d6e-88a3-44a06ffa516e tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Terminating instance [ 2235.568888] env[61215]: DEBUG nova.compute.manager [None req-d3b902a4-d4d7-4d6e-88a3-44a06ffa516e tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2235.570123] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b902a4-d4d7-4d6e-88a3-44a06ffa516e tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2235.570123] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9effe309-110d-41aa-a812-27bf53c399e2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.581072] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-201beead-c8ee-42b4-9113-e235a162d9e3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.594042] env[61215]: DEBUG nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2235.614516] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-d3b902a4-d4d7-4d6e-88a3-44a06ffa516e tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c233ab81-232d-49be-a176-bf846f0d8cc3 could not be found. [ 2235.614728] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b902a4-d4d7-4d6e-88a3-44a06ffa516e tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2235.614925] env[61215]: INFO nova.compute.manager [None req-d3b902a4-d4d7-4d6e-88a3-44a06ffa516e tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Took 0.05 seconds to destroy the instance on the hypervisor. [ 2235.615218] env[61215]: DEBUG oslo.service.loopingcall [None req-d3b902a4-d4d7-4d6e-88a3-44a06ffa516e tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2235.615471] env[61215]: DEBUG nova.compute.manager [-] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2235.615570] env[61215]: DEBUG nova.network.neutron [-] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2235.641790] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2235.642053] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2235.643496] env[61215]: INFO nova.compute.claims [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2235.659909] env[61215]: DEBUG nova.network.neutron [-] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2235.679384] env[61215]: INFO nova.compute.manager [-] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] Took 0.06 seconds to deallocate network for instance. [ 2235.767084] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d3b902a4-d4d7-4d6e-88a3-44a06ffa516e tempest-ImagesNegativeTestJSON-1372788275 tempest-ImagesNegativeTestJSON-1372788275-project-member] Lock "c233ab81-232d-49be-a176-bf846f0d8cc3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.205s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2235.768225] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "c233ab81-232d-49be-a176-bf846f0d8cc3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 402.800s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2235.768313] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: c233ab81-232d-49be-a176-bf846f0d8cc3] During sync_power_state the instance has a pending task (deleting). Skip. 
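
Editor's note: the "Acquiring lock ... acquired ... waited Ns ... released ... held Ns" records above come from oslo.concurrency's lock decorator/context manager wrapping each critical section under a named lock (here the instance UUID), which is how a `do_terminate_instance` call ends up waiting 436.656s for the `_locked_do_build_and_run_instance` holder that held the same lock for 632.299s. A minimal sketch using only the public oslo.concurrency API; Nova's real code routes this through its own wrappers, so treat the function body as illustrative.

```python
from oslo_concurrency import lockutils

# Lock name taken from the log above; any UUID-named lock works the same way.
INSTANCE_UUID = "c233ab81-232d-49be-a176-bf846f0d8cc3"


@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # Body runs only once any competing holder of the same lock name
    # (e.g. a still-running build for this instance) has released it --
    # the time spent blocked here is the "waited Ns" value in the log,
    # and the time spent inside is the "held Ns" value.
    pass
```

The per-instance lock serializes build, terminate, and power-state sync against the same instance, which is why the `_sync_power_states` worker above simply skips the instance once it observes a pending `deleting` task.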
[ 2235.768538] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "c233ab81-232d-49be-a176-bf846f0d8cc3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2235.849893] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e48e4450-e938-4ee5-98f3-1035d29b0071 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.857927] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af6e7c9-e78c-4db3-a878-f86d8aa7232d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.887773] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d522cfb-b7f8-4527-aa82-734ce8d370ab {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.894871] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9787f57-5748-40f7-8990-ac22779900d3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.909217] env[61215]: DEBUG nova.compute.provider_tree [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2235.919516] env[61215]: DEBUG nova.scheduler.client.report [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2235.932821] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.291s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2235.933312] env[61215]: DEBUG nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2235.964837] env[61215]: DEBUG nova.compute.utils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2235.966124] env[61215]: DEBUG nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2235.966308] env[61215]: DEBUG nova.network.neutron [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2235.976429] env[61215]: DEBUG nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2236.033903] env[61215]: DEBUG nova.policy [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1cfa32e7efb40b88a7565fb525635bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33bc963e963d493f8bad1328351cd968', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2236.037033] env[61215]: DEBUG nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2236.061385] env[61215]: DEBUG nova.virt.hardware [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2236.061674] env[61215]: DEBUG nova.virt.hardware [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2236.061859] env[61215]: DEBUG nova.virt.hardware [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2236.062230] env[61215]: DEBUG nova.virt.hardware [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2236.062230] env[61215]: DEBUG nova.virt.hardware [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2236.062340] env[61215]: DEBUG nova.virt.hardware [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2236.062550] env[61215]: DEBUG nova.virt.hardware [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2236.062711] env[61215]: DEBUG nova.virt.hardware [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2236.062901] env[61215]: DEBUG nova.virt.hardware [None 
req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2236.063104] env[61215]: DEBUG nova.virt.hardware [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2236.063298] env[61215]: DEBUG nova.virt.hardware [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2236.064156] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729735f7-e6a9-46d1-89b5-a5b2456aaadd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.071762] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846a16f6-15ba-4f12-aa98-35b45266cca8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.327928] env[61215]: DEBUG nova.network.neutron [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Successfully created port: 35de7a71-6029-4959-8079-e4f744353ee2 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2236.904468] env[61215]: DEBUG nova.network.neutron [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Successfully created port: fa5c8f31-2ea3-45e3-a55c-45f97b50481a {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2237.521431] env[61215]: DEBUG nova.compute.manager [req-07c06607-3fe0-4129-921f-36a2074fc3ac req-3a02841f-d2ff-4d57-aa28-561aea044259 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Received event network-vif-plugged-35de7a71-6029-4959-8079-e4f744353ee2 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2237.521667] env[61215]: DEBUG oslo_concurrency.lockutils [req-07c06607-3fe0-4129-921f-36a2074fc3ac req-3a02841f-d2ff-4d57-aa28-561aea044259 service nova] Acquiring lock "9eacbeb5-b918-4b0f-82f4-d06a037803df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.521881] env[61215]: DEBUG oslo_concurrency.lockutils [req-07c06607-3fe0-4129-921f-36a2074fc3ac req-3a02841f-d2ff-4d57-aa28-561aea044259 service nova] Lock "9eacbeb5-b918-4b0f-82f4-d06a037803df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.522142] env[61215]: DEBUG oslo_concurrency.lockutils [req-07c06607-3fe0-4129-921f-36a2074fc3ac 
req-3a02841f-d2ff-4d57-aa28-561aea044259 service nova] Lock "9eacbeb5-b918-4b0f-82f4-d06a037803df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2237.522352] env[61215]: DEBUG nova.compute.manager [req-07c06607-3fe0-4129-921f-36a2074fc3ac req-3a02841f-d2ff-4d57-aa28-561aea044259 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] No waiting events found dispatching network-vif-plugged-35de7a71-6029-4959-8079-e4f744353ee2 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2237.522538] env[61215]: WARNING nova.compute.manager [req-07c06607-3fe0-4129-921f-36a2074fc3ac req-3a02841f-d2ff-4d57-aa28-561aea044259 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Received unexpected event network-vif-plugged-35de7a71-6029-4959-8079-e4f744353ee2 for instance with vm_state building and task_state spawning. [ 2237.572097] env[61215]: DEBUG nova.network.neutron [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Successfully updated port: 35de7a71-6029-4959-8079-e4f744353ee2 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2238.304365] env[61215]: DEBUG nova.network.neutron [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Successfully updated port: fa5c8f31-2ea3-45e3-a55c-45f97b50481a {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2238.316464] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "refresh_cache-9eacbeb5-b918-4b0f-82f4-d06a037803df" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2238.316614] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquired lock "refresh_cache-9eacbeb5-b918-4b0f-82f4-d06a037803df" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2238.316767] env[61215]: DEBUG nova.network.neutron [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2238.358862] env[61215]: DEBUG nova.network.neutron [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2238.721766] env[61215]: DEBUG nova.network.neutron [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Updating instance_info_cache with network_info: [{"id": "35de7a71-6029-4959-8079-e4f744353ee2", "address": "fa:16:3e:6f:f1:9d", "network": {"id": "83b19755-5dfb-48e1-a3d0-74d29338ada8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-881124371", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35de7a71-60", "ovs_interfaceid": "35de7a71-6029-4959-8079-e4f744353ee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fa5c8f31-2ea3-45e3-a55c-45f97b50481a", "address": "fa:16:3e:88:4d:00", "network": {"id": "b985f3bf-9f29-46bf-8339-d1325e15d056", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-703356015", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa5c8f31-2e", "ovs_interfaceid": "fa5c8f31-2ea3-45e3-a55c-45f97b50481a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2238.735583] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Releasing lock "refresh_cache-9eacbeb5-b918-4b0f-82f4-d06a037803df" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2238.736019] env[61215]: DEBUG nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Instance network_info: |[{"id": "35de7a71-6029-4959-8079-e4f744353ee2", "address": "fa:16:3e:6f:f1:9d", "network": 
{"id": "83b19755-5dfb-48e1-a3d0-74d29338ada8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-881124371", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35de7a71-60", "ovs_interfaceid": "35de7a71-6029-4959-8079-e4f744353ee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fa5c8f31-2ea3-45e3-a55c-45f97b50481a", "address": "fa:16:3e:88:4d:00", "network": {"id": "b985f3bf-9f29-46bf-8339-d1325e15d056", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-703356015", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa5c8f31-2e", "ovs_interfaceid": "fa5c8f31-2ea3-45e3-a55c-45f97b50481a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2238.736544] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:f1:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7f41333-42ee-47f3-936c-d6701ab786d2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35de7a71-6029-4959-8079-e4f744353ee2', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:4d:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa5c8f31-2ea3-45e3-a55c-45f97b50481a', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2238.746646] env[61215]: DEBUG oslo.service.loopingcall [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to 
return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2238.747210] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2238.747443] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41a4da37-fe8f-4615-b79c-ccb226bf49eb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.770422] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2238.770422] env[61215]: value = "task-1690424" [ 2238.770422] env[61215]: _type = "Task" [ 2238.770422] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2238.779378] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690424, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.281687] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690424, 'name': CreateVM_Task, 'duration_secs': 0.333075} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2239.281865] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2239.282657] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2239.282845] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2239.283202] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2239.283455] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d12984ed-f50c-47c7-8365-8368e1bb028c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.288215] env[61215]: DEBUG oslo_vmware.api [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Waiting for the task: (returnval){ [ 2239.288215] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]529f8663-a67d-f64c-295e-5545d6a24dc5" [ 2239.288215] env[61215]: _type = "Task" [ 2239.288215] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2239.296242] env[61215]: DEBUG oslo_vmware.api [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]529f8663-a67d-f64c-295e-5545d6a24dc5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.546569] env[61215]: DEBUG nova.compute.manager [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Received event network-changed-35de7a71-6029-4959-8079-e4f744353ee2 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2239.547047] env[61215]: DEBUG nova.compute.manager [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Refreshing instance network info cache due to event network-changed-35de7a71-6029-4959-8079-e4f744353ee2. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2239.547047] env[61215]: DEBUG oslo_concurrency.lockutils [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] Acquiring lock "refresh_cache-9eacbeb5-b918-4b0f-82f4-d06a037803df" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2239.547318] env[61215]: DEBUG oslo_concurrency.lockutils [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] Acquired lock "refresh_cache-9eacbeb5-b918-4b0f-82f4-d06a037803df" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2239.547468] env[61215]: DEBUG nova.network.neutron [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Refreshing network info cache for port 35de7a71-6029-4959-8079-e4f744353ee2 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2239.798711] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2239.798962] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2239.799203] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2239.816578] 
env[61215]: DEBUG nova.network.neutron [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Updated VIF entry in instance network info cache for port 35de7a71-6029-4959-8079-e4f744353ee2. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2239.817010] env[61215]: DEBUG nova.network.neutron [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Updating instance_info_cache with network_info: [{"id": "35de7a71-6029-4959-8079-e4f744353ee2", "address": "fa:16:3e:6f:f1:9d", "network": {"id": "83b19755-5dfb-48e1-a3d0-74d29338ada8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-881124371", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35de7a71-60", "ovs_interfaceid": "35de7a71-6029-4959-8079-e4f744353ee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fa5c8f31-2ea3-45e3-a55c-45f97b50481a", "address": "fa:16:3e:88:4d:00", "network": {"id": "b985f3bf-9f29-46bf-8339-d1325e15d056", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-703356015", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa5c8f31-2e", "ovs_interfaceid": "fa5c8f31-2ea3-45e3-a55c-45f97b50481a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2239.826354] env[61215]: DEBUG oslo_concurrency.lockutils [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] Releasing lock "refresh_cache-9eacbeb5-b918-4b0f-82f4-d06a037803df" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2239.826577] env[61215]: DEBUG nova.compute.manager [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] [instance: 
9eacbeb5-b918-4b0f-82f4-d06a037803df] Received event network-vif-plugged-fa5c8f31-2ea3-45e3-a55c-45f97b50481a {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2239.826773] env[61215]: DEBUG oslo_concurrency.lockutils [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] Acquiring lock "9eacbeb5-b918-4b0f-82f4-d06a037803df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2239.826973] env[61215]: DEBUG oslo_concurrency.lockutils [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] Lock "9eacbeb5-b918-4b0f-82f4-d06a037803df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2239.827153] env[61215]: DEBUG oslo_concurrency.lockutils [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] Lock "9eacbeb5-b918-4b0f-82f4-d06a037803df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2239.827352] env[61215]: DEBUG nova.compute.manager [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] No waiting events found dispatching network-vif-plugged-fa5c8f31-2ea3-45e3-a55c-45f97b50481a {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2239.827518] env[61215]: WARNING nova.compute.manager [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Received unexpected event network-vif-plugged-fa5c8f31-2ea3-45e3-a55c-45f97b50481a for instance with vm_state building and task_state spawning. [ 2239.827684] env[61215]: DEBUG nova.compute.manager [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Received event network-changed-fa5c8f31-2ea3-45e3-a55c-45f97b50481a {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2239.827843] env[61215]: DEBUG nova.compute.manager [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Refreshing instance network info cache due to event network-changed-fa5c8f31-2ea3-45e3-a55c-45f97b50481a. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2239.828039] env[61215]: DEBUG oslo_concurrency.lockutils [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] Acquiring lock "refresh_cache-9eacbeb5-b918-4b0f-82f4-d06a037803df" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2239.828186] env[61215]: DEBUG oslo_concurrency.lockutils [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] Acquired lock "refresh_cache-9eacbeb5-b918-4b0f-82f4-d06a037803df" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2239.828343] env[61215]: DEBUG nova.network.neutron [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Refreshing network info cache for port fa5c8f31-2ea3-45e3-a55c-45f97b50481a {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2240.278476] env[61215]: DEBUG nova.network.neutron [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Updated VIF entry in instance network info cache for port fa5c8f31-2ea3-45e3-a55c-45f97b50481a. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2240.278923] env[61215]: DEBUG nova.network.neutron [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Updating instance_info_cache with network_info: [{"id": "35de7a71-6029-4959-8079-e4f744353ee2", "address": "fa:16:3e:6f:f1:9d", "network": {"id": "83b19755-5dfb-48e1-a3d0-74d29338ada8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-881124371", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.148", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7f41333-42ee-47f3-936c-d6701ab786d2", "external-id": "nsx-vlan-transportzone-674", "segmentation_id": 674, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35de7a71-60", "ovs_interfaceid": "35de7a71-6029-4959-8079-e4f744353ee2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fa5c8f31-2ea3-45e3-a55c-45f97b50481a", "address": "fa:16:3e:88:4d:00", "network": {"id": "b985f3bf-9f29-46bf-8339-d1325e15d056", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-703356015", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.240", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "33bc963e963d493f8bad1328351cd968", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa5c8f31-2e", "ovs_interfaceid": "fa5c8f31-2ea3-45e3-a55c-45f97b50481a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2240.289338] env[61215]: DEBUG oslo_concurrency.lockutils [req-6743c50a-6539-4142-9111-0f7641f85262 req-3c3c2016-bd3a-4827-ae3e-9783a46e8373 service nova] Releasing lock "refresh_cache-9eacbeb5-b918-4b0f-82f4-d06a037803df" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2246.011033] env[61215]: DEBUG oslo_concurrency.lockutils [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "9eacbeb5-b918-4b0f-82f4-d06a037803df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2250.907068] env[61215]: DEBUG oslo_concurrency.lockutils [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "66420497-c0f6-4f1d-86ee-23d53400e325" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2274.383775] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "799c902d-2bc1-4738-b3af-772a5feea819" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2274.384094] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "799c902d-2bc1-4738-b3af-772a5feea819" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2275.672632] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.324658] env[61215]: WARNING oslo_vmware.rw_handles [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2282.324658] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2282.324658] env[61215]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2282.324658] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2282.324658] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2282.324658] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2282.324658] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2282.324658] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2282.324658] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2282.324658] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2282.324658] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2282.324658] env[61215]: ERROR oslo_vmware.rw_handles [ 2282.324658] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/6e09cddf-f381-4605-8897-b03d73cdd783/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2282.326830] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2282.327089] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Copying Virtual Disk [datastore1] vmware_temp/6e09cddf-f381-4605-8897-b03d73cdd783/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/6e09cddf-f381-4605-8897-b03d73cdd783/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2282.327373] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-797142c5-d776-4ea2-9a6b-dbcc49aad25d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.335542] env[61215]: DEBUG oslo_vmware.api [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Waiting for the task: (returnval){ [ 2282.335542] env[61215]: value = "task-1690425" [ 2282.335542] env[61215]: _type = "Task" [ 2282.335542] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.343389] env[61215]: DEBUG oslo_vmware.api [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Task: {'id': task-1690425, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2282.653833] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.845846] env[61215]: DEBUG oslo_vmware.exceptions [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2282.846099] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2282.846655] env[61215]: ERROR nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2282.846655] env[61215]: Faults: ['InvalidArgument'] [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Traceback (most recent call last): [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] yield resources [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] self.driver.spawn(context, instance, image_meta, [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] self._fetch_image_if_missing(context, vi) [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] image_cache(vi, tmp_image_ds_loc) [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in 
_cache_sparse_image [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] vm_util.copy_virtual_disk( [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] session._wait_for_task(vmdk_copy_task) [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] return self.wait_for_task(task_ref) [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] return evt.wait() [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] result = hub.switch() [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] return self.greenlet.switch() [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] self.f(*self.args, **self.kw) [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] raise exceptions.translate_fault(task_info.error) [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Faults: ['InvalidArgument'] [ 2282.846655] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] [ 2282.847983] env[61215]: INFO nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Terminating instance [ 2282.848638] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2282.848864] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2282.849116] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dfaab8a9-7519-49df-bda4-1658546f7124 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.851222] env[61215]: DEBUG nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2282.851389] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2282.852147] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537dbed0-2759-40c3-ae7d-d2f297197705 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.859497] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2282.859742] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2899d0c8-140a-4b52-8f95-bf3f02b2dabd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.861853] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2282.862044] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2282.863011] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a6aa1a2-025f-45e3-a5f7-82cd80441ffb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.867413] env[61215]: DEBUG oslo_vmware.api [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Waiting for the task: (returnval){ [ 2282.867413] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f63539-62b2-c9fd-554c-5576366c83cd" [ 2282.867413] env[61215]: _type = "Task" [ 2282.867413] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.881179] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2282.881415] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Creating directory with path [datastore1] vmware_temp/511f25e9-c251-4d7b-bc34-b3fcfa705885/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2282.881625] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbff3b0b-1c90-4efd-b898-6f394fb73e42 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.901422] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Created directory with path [datastore1] vmware_temp/511f25e9-c251-4d7b-bc34-b3fcfa705885/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2282.901618] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Fetch image to [datastore1] vmware_temp/511f25e9-c251-4d7b-bc34-b3fcfa705885/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2282.901859] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/511f25e9-c251-4d7b-bc34-b3fcfa705885/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2282.902541] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393fe7aa-a0b4-40f6-985f-d4b34bba7511 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.908979] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9fae8c-4a59-4741-89c8-bad73cb52520 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.917691] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498a4c15-e3d7-4769-8f87-e5a8f07d49b7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.949511] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e82f20-e90f-486d-b898-4f5761f63dcc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.955831] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e031fe61-74bd-40da-8dc6-72f94fbc8e31 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.976692] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2283.018553] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2283.018779] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2283.018964] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Deleting the datastore file [datastore1] bb56c470-9f85-44b1-b1ec-f44236e9de51 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2283.019237] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d81d1319-5ab2-4ef7-af4b-73994d3bd399 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.025087] env[61215]: DEBUG oslo_vmware.api [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Waiting for the task: (returnval){ [ 2283.025087] env[61215]: value = "task-1690427" [ 2283.025087] env[61215]: _type = "Task" [ 2283.025087] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2283.030049] env[61215]: DEBUG oslo_vmware.rw_handles [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/511f25e9-c251-4d7b-bc34-b3fcfa705885/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2283.034350] env[61215]: DEBUG oslo_vmware.api [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Task: {'id': task-1690427, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.093554] env[61215]: DEBUG oslo_vmware.rw_handles [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2283.093770] env[61215]: DEBUG oslo_vmware.rw_handles [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/511f25e9-c251-4d7b-bc34-b3fcfa705885/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2283.535240] env[61215]: DEBUG oslo_vmware.api [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Task: {'id': task-1690427, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106295} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2283.535584] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2283.535696] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2283.535876] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2283.536066] env[61215]: INFO nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Took 0.68 seconds to destroy the instance on the hypervisor. [ 2283.538170] env[61215]: DEBUG nova.compute.claims [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2283.538345] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2283.538562] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.653661] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2283.654058] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2283.654240] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2283.730422] env[61215]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639a6385-ad36-4315-a860-27efa4361f7f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.738136] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e1ad70-63e8-45c1-8e16-a8725ff96a73 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.768208] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a695eacb-30b0-4efb-86e2-1e7ed5ad8b5e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.775582] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4aa1d3-e0dc-4b70-9d43-21bef3324e48 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.788854] env[61215]: DEBUG nova.compute.provider_tree [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2283.799156] env[61215]: DEBUG nova.scheduler.client.report [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2283.814562] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.276s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2283.815110] env[61215]: ERROR nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2283.815110] env[61215]: Faults: ['InvalidArgument'] [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Traceback (most recent call last): [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] self.driver.spawn(context, instance, image_meta, [ 2283.815110] env[61215]: ERROR 
nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] self._fetch_image_if_missing(context, vi) [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] image_cache(vi, tmp_image_ds_loc) [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] vm_util.copy_virtual_disk( [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] session._wait_for_task(vmdk_copy_task) [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] return self.wait_for_task(task_ref) [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] return evt.wait() [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] result = hub.switch() [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] return self.greenlet.switch() [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] self.f(*self.args, **self.kw) [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: 
bb56c470-9f85-44b1-b1ec-f44236e9de51] raise exceptions.translate_fault(task_info.error) [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Faults: ['InvalidArgument'] [ 2283.815110] env[61215]: ERROR nova.compute.manager [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] [ 2283.815965] env[61215]: DEBUG nova.compute.utils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2283.817336] env[61215]: DEBUG nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Build of instance bb56c470-9f85-44b1-b1ec-f44236e9de51 was re-scheduled: A specified parameter was not correct: fileType [ 2283.817336] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2283.817708] env[61215]: DEBUG nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2283.817906] env[61215]: DEBUG nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2283.818085] env[61215]: DEBUG nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2283.818252] env[61215]: DEBUG nova.network.neutron [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2284.635099] env[61215]: DEBUG nova.network.neutron [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2284.648589] env[61215]: INFO nova.compute.manager [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Took 0.83 seconds to deallocate network for instance. 
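The traceback above ends in oslo.vmware's task poller translating the vCenter task error into a VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']). A minimal standalone sketch of that pattern, not Nova's actual call path: the host, credentials, datastore paths, and the keyword arguments to CopyVirtualDisk_Task below are made up for illustration.

    from oslo_vmware import api, exceptions

    # Establish a vSphere API session; the "Invoking ... with opID=..." DEBUG
    # lines in this log come from oslo.vmware's request handler for calls
    # like the ones below.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=2, task_poll_interval=0.5)

    # Start a CopyVirtualDisk_Task against the VirtualDiskManager, then poll
    # it. wait_for_task() raises VimFaultException when the task reports an
    # error, which is how the InvalidArgument fault surfaced in the
    # _cache_sparse_image traceback above. (Argument names here are
    # illustrative; the real call supplies full disk specs.)
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                              sourceName='[datastore1] tmp/src.vmdk',
                              destName='[datastore1] cache/dst.vmdk')
    try:
        session.wait_for_task(task)
    except exceptions.VimFaultException as err:
        print(err.fault_list)  # e.g. ['InvalidArgument'], as logged above

When the fault fires, Nova aborts the resource claim and re-schedules the build, which is the "Aborting claim" / "was re-scheduled" sequence recorded above.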
[ 2284.747478] env[61215]: INFO nova.scheduler.client.report [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Deleted allocations for instance bb56c470-9f85-44b1-b1ec-f44236e9de51 [ 2284.769047] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddce366d-0d09-4b16-be0a-f4b2b6dcb653 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "bb56c470-9f85-44b1-b1ec-f44236e9de51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 629.963s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2284.769982] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f393385d-e6a1-427b-8368-0a05ce352669 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "bb56c470-9f85-44b1-b1ec-f44236e9de51" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 432.839s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2284.770234] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f393385d-e6a1-427b-8368-0a05ce352669 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "bb56c470-9f85-44b1-b1ec-f44236e9de51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2284.770504] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f393385d-e6a1-427b-8368-0a05ce352669 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "bb56c470-9f85-44b1-b1ec-f44236e9de51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2284.770604] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f393385d-e6a1-427b-8368-0a05ce352669 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "bb56c470-9f85-44b1-b1ec-f44236e9de51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2284.772627] env[61215]: INFO nova.compute.manager [None req-f393385d-e6a1-427b-8368-0a05ce352669 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Terminating instance [ 2284.774363] env[61215]: DEBUG nova.compute.manager [None req-f393385d-e6a1-427b-8368-0a05ce352669 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2284.774544] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f393385d-e6a1-427b-8368-0a05ce352669 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2284.775027] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-475cac32-d304-458e-b590-de2bc78a8959 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.785755] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d18377d-c115-4684-9129-86d5ed1ca51d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.796185] env[61215]: DEBUG nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2284.817394] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-f393385d-e6a1-427b-8368-0a05ce352669 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bb56c470-9f85-44b1-b1ec-f44236e9de51 could not be found. [ 2284.817596] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-f393385d-e6a1-427b-8368-0a05ce352669 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2284.817774] env[61215]: INFO nova.compute.manager [None req-f393385d-e6a1-427b-8368-0a05ce352669 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2284.818339] env[61215]: DEBUG oslo.service.loopingcall [None req-f393385d-e6a1-427b-8368-0a05ce352669 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2284.818339] env[61215]: DEBUG nova.compute.manager [-] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2284.818339] env[61215]: DEBUG nova.network.neutron [-] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2284.845401] env[61215]: DEBUG nova.network.neutron [-] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2284.848195] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2284.848429] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2284.849829] env[61215]: INFO nova.compute.claims [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2284.853230] env[61215]: INFO nova.compute.manager [-] [instance: bb56c470-9f85-44b1-b1ec-f44236e9de51] Took 0.03 seconds to deallocate network for instance. 
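The paired lock DEBUG lines that recur throughout these records (for "compute_resources", per-instance UUIDs, "<uuid>-events", and "refresh_cache-<uuid>" names) are emitted by oslo.concurrency's named-lock helpers. A minimal sketch of the two forms, with lock names copied from the surrounding records; Nova's real call sites wrap these helpers, so this is illustrative only.

    from oslo_concurrency import lockutils

    # Context-manager form: produces the plain 'Acquiring lock "..."' /
    # 'Acquired lock "..."' / 'Releasing lock "..."' DEBUG lines
    # (lockutils.py:310/313/331 in the records above).
    with lockutils.lock('refresh_cache-49ab8e42-2da3-474b-b283-9d31b089fd76'):
        pass  # e.g. refresh the instance network info cache while serialized

    # Decorator form: produces the 'Lock "..." acquired by "..." :: waited Ns'
    # and '"released" ... :: held Ns' variants (lockutils.py:402/407/421 above).
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # e.g. claim resources under the tracker lock

The "waited"/"held" durations logged by the decorator form are what make lock contention visible in this log, such as the terminate_instance lock above that waited 432.839s while the build held it for 629.963s.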
[ 2284.944594] env[61215]: DEBUG oslo_concurrency.lockutils [None req-f393385d-e6a1-427b-8368-0a05ce352669 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "bb56c470-9f85-44b1-b1ec-f44236e9de51" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.175s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2285.048655] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b87a261e-08b1-41b1-bcf3-b97f0d240520 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.056604] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35de4549-42f0-499d-9b98-2846285f107b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.085119] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fd830f-2ab6-44e6-9f9d-ef01203786e8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.091685] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace1bdb6-b23b-4b09-adbb-71b901e0037f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.104355] env[61215]: DEBUG nova.compute.provider_tree [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2285.113332] env[61215]: DEBUG nova.scheduler.client.report [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2285.130016] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.281s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2285.130491] env[61215]: DEBUG nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2285.163251] env[61215]: DEBUG nova.compute.utils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2285.164676] env[61215]: DEBUG nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2285.164843] env[61215]: DEBUG nova.network.neutron [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2285.173745] env[61215]: DEBUG nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2285.225579] env[61215]: DEBUG nova.policy [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9db5492250b426c80f611d7a5686c1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3eac98da0cb41cbad12d92e9151b143', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2285.234819] env[61215]: DEBUG nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2285.259051] env[61215]: DEBUG nova.virt.hardware [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=<?>,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-01T03:04:46Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2285.259300] env[61215]: DEBUG nova.virt.hardware [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2285.259462] env[61215]: DEBUG nova.virt.hardware [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2285.259650] env[61215]: DEBUG nova.virt.hardware [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2285.259797] env[61215]: DEBUG nova.virt.hardware [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2285.259946] env[61215]: DEBUG nova.virt.hardware [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2285.260169] env[61215]: DEBUG nova.virt.hardware [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2285.260332] env[61215]: DEBUG nova.virt.hardware [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2285.260501] env[61215]: DEBUG nova.virt.hardware [None 
req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2285.260667] env[61215]: DEBUG nova.virt.hardware [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2285.260838] env[61215]: DEBUG nova.virt.hardware [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2285.261696] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b997ecf2-3f4c-45e3-b471-e128d98f81b3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.269466] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f334036f-4a5b-4ac0-b3b7-e8d3ffb2c97f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.740207] env[61215]: DEBUG nova.network.neutron [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Successfully created port: 96d844e9-7fc9-43eb-b63b-4dd50172fb92 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2286.280824] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "49ab8e42-2da3-474b-b283-9d31b089fd76" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2286.436389] env[61215]: DEBUG nova.network.neutron [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Successfully updated port: 96d844e9-7fc9-43eb-b63b-4dd50172fb92 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2286.455255] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "refresh_cache-49ab8e42-2da3-474b-b283-9d31b089fd76" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2286.455405] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "refresh_cache-49ab8e42-2da3-474b-b283-9d31b089fd76" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2286.455588] env[61215]: DEBUG nova.network.neutron [None 
req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2286.496042] env[61215]: DEBUG nova.network.neutron [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2286.654839] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2286.655046] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2286.655097] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2286.672420] env[61215]: DEBUG nova.network.neutron [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Updating instance_info_cache with network_info: [{"id": "96d844e9-7fc9-43eb-b63b-4dd50172fb92", "address": "fa:16:3e:63:8e:f1", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96d844e9-7f", "ovs_interfaceid": "96d844e9-7fc9-43eb-b63b-4dd50172fb92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2286.677343] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2286.677506] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2286.677644] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2286.677773] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2286.677898] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2286.678218] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2286.678403] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2286.678532] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2286.678656] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2286.678778] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2286.678900] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2286.679397] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2286.682375] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "refresh_cache-49ab8e42-2da3-474b-b283-9d31b089fd76" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2286.682630] env[61215]: DEBUG nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Instance network_info: |[{"id": "96d844e9-7fc9-43eb-b63b-4dd50172fb92", "address": "fa:16:3e:63:8e:f1", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96d844e9-7f", "ovs_interfaceid": "96d844e9-7fc9-43eb-b63b-4dd50172fb92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2286.683042] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:8e:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96d844e9-7fc9-43eb-b63b-4dd50172fb92', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2286.690934] env[61215]: DEBUG oslo.service.loopingcall [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2286.692089] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2286.692883] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2286.693120] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2286.693291] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2286.693491] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2286.693742] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-841f77f2-d523-40b2-8e1a-97fe9d47b93c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.708814] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ef6976-5067-4952-b19c-31bf413889dc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.718077] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4918390c-284e-489f-a789-f27cd57342e7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.721958] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2286.721958] env[61215]: value = "task-1690428" [ 2286.721958] env[61215]: _type = "Task" [ 2286.721958] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.735360] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-044fa40b-e03f-453b-b2af-ec8ea7ce3c8e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.740415] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690428, 'name': CreateVM_Task} progress is 6%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.744694] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cef160e-6a7d-45d5-b3d0-40bd9a73f173 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.778649] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181301MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2286.778851] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2286.779063] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2286.863997] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 38fe96cf-e570-4c0e-af6f-2f199efc06ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.864138] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0d609df2-621c-456f-b8ce-a209e9052153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.864275] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 66420497-c0f6-4f1d-86ee-23d53400e325 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.864416] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 59d93243-c15c-4554-863b-779d94b3d858 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.864541] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f3a3a510-a085-4388-b49d-b4371095b436 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.864678] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.864815] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb5fb791-5f62-4717-8d8f-7d56ffda15be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.864936] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 455e7272-f099-496f-b929-ed6fa9a0ab44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.865067] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 9eacbeb5-b918-4b0f-82f4-d06a037803df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.865211] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49ab8e42-2da3-474b-b283-9d31b089fd76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2286.878106] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e4fdc9ea-dad8-4422-b110-5c5aa4e25f3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2286.891714] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 799c902d-2bc1-4738-b3af-772a5feea819 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2286.891714] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2286.891714] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2287.050210] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950a5c50-37bf-4c32-a3c5-a33fec0fe161 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.057840] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d502af25-ea12-463f-811f-88d2819259bc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.086691] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c3a4a3-04d1-48a3-aef9-b47e93d15574 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.093803] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4c698a-5e23-4217-af16-6f349163a05a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.108828] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2287.117020] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2287.130434] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2287.130604] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.352s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2287.233241] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': 
task-1690428, 'name': CreateVM_Task, 'duration_secs': 0.298898} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2287.233241] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2287.233562] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2287.233741] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2287.234073] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2287.234322] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a7ff5c8-be88-4c22-93ac-e36c26c54fc8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.239621] env[61215]: DEBUG oslo_vmware.api [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 2287.239621] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e381b5-6834-1d5a-05fb-6d00060c1687" [ 2287.239621] env[61215]: _type = "Task" [ 2287.239621] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.247181] env[61215]: DEBUG oslo_vmware.api [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e381b5-6834-1d5a-05fb-6d00060c1687, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.749126] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2287.749436] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2287.749620] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2288.063664] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "02265af9-44e6-4341-ba30-be7caad7da8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2288.063896] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "02265af9-44e6-4341-ba30-be7caad7da8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2288.105650] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2288.263753] env[61215]: DEBUG nova.compute.manager [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Received event network-vif-plugged-96d844e9-7fc9-43eb-b63b-4dd50172fb92 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2288.263974] env[61215]: DEBUG oslo_concurrency.lockutils [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] Acquiring lock "49ab8e42-2da3-474b-b283-9d31b089fd76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2288.264203] env[61215]: DEBUG oslo_concurrency.lockutils [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] Lock "49ab8e42-2da3-474b-b283-9d31b089fd76-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2288.264368] env[61215]: DEBUG oslo_concurrency.lockutils [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] Lock "49ab8e42-2da3-474b-b283-9d31b089fd76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2288.264536] env[61215]: DEBUG nova.compute.manager [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] No waiting events found dispatching network-vif-plugged-96d844e9-7fc9-43eb-b63b-4dd50172fb92 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2288.264699] env[61215]: WARNING nova.compute.manager [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Received unexpected event network-vif-plugged-96d844e9-7fc9-43eb-b63b-4dd50172fb92 for instance with vm_state building and task_state deleting. [ 2288.264853] env[61215]: DEBUG nova.compute.manager [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Received event network-changed-96d844e9-7fc9-43eb-b63b-4dd50172fb92 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2288.265069] env[61215]: DEBUG nova.compute.manager [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Refreshing instance network info cache due to event network-changed-96d844e9-7fc9-43eb-b63b-4dd50172fb92. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2288.265201] env[61215]: DEBUG oslo_concurrency.lockutils [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] Acquiring lock "refresh_cache-49ab8e42-2da3-474b-b283-9d31b089fd76" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2288.265338] env[61215]: DEBUG oslo_concurrency.lockutils [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] Acquired lock "refresh_cache-49ab8e42-2da3-474b-b283-9d31b089fd76" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2288.265554] env[61215]: DEBUG nova.network.neutron [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Refreshing network info cache for port 96d844e9-7fc9-43eb-b63b-4dd50172fb92 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2288.534358] env[61215]: DEBUG nova.network.neutron [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Updated VIF entry in instance network info cache for port 96d844e9-7fc9-43eb-b63b-4dd50172fb92. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2288.534729] env[61215]: DEBUG nova.network.neutron [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Updating instance_info_cache with network_info: [{"id": "96d844e9-7fc9-43eb-b63b-4dd50172fb92", "address": "fa:16:3e:63:8e:f1", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96d844e9-7f", "ovs_interfaceid": "96d844e9-7fc9-43eb-b63b-4dd50172fb92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2288.544292] env[61215]: DEBUG oslo_concurrency.lockutils [req-310b5c91-56d6-44db-b627-649493a5a14e req-e2d05118-740a-448a-aaf4-6d9ff386e463 service nova] Releasing lock "refresh_cache-49ab8e42-2da3-474b-b283-9d31b089fd76" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2288.650101] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2288.675047] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2288.675229] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2328.499690] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2328.499690] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2332.343068] env[61215]: WARNING oslo_vmware.rw_handles [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2332.343068] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2332.343068] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2332.343068] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2332.343068] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2332.343068] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2332.343068] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2332.343068] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2332.343068] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2332.343068] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2332.343068] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2332.343068] env[61215]: ERROR oslo_vmware.rw_handles [ 2332.343795] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/511f25e9-c251-4d7b-bc34-b3fcfa705885/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2332.345653] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2332.345905] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 
tempest-ServerTagsTestJSON-833043055-project-member] Copying Virtual Disk [datastore1] vmware_temp/511f25e9-c251-4d7b-bc34-b3fcfa705885/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/511f25e9-c251-4d7b-bc34-b3fcfa705885/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2332.346231] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f3e84d8-9dd8-47b0-a0be-b87e679a7357 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.354746] env[61215]: DEBUG oslo_vmware.api [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Waiting for the task: (returnval){ [ 2332.354746] env[61215]: value = "task-1690429" [ 2332.354746] env[61215]: _type = "Task" [ 2332.354746] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.363768] env[61215]: DEBUG oslo_vmware.api [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Task: {'id': task-1690429, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.865215] env[61215]: DEBUG oslo_vmware.exceptions [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2332.865566] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2332.866070] env[61215]: ERROR nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2332.866070] env[61215]: Faults: ['InvalidArgument'] [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Traceback (most recent call last): [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] yield resources [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] self.driver.spawn(context, instance, image_meta, [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 
38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] self._fetch_image_if_missing(context, vi) [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] image_cache(vi, tmp_image_ds_loc) [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] vm_util.copy_virtual_disk( [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] session._wait_for_task(vmdk_copy_task) [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] return self.wait_for_task(task_ref) [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] return evt.wait() [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] result = hub.switch() [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] return self.greenlet.switch() [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] self.f(*self.args, **self.kw) [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] raise 
exceptions.translate_fault(task_info.error) [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Faults: ['InvalidArgument'] [ 2332.866070] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] [ 2332.866999] env[61215]: INFO nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Terminating instance [ 2332.868090] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2332.868316] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2332.868943] env[61215]: DEBUG nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2332.869167] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2332.869396] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-addbb038-8443-40de-a094-836657b777f9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.871768] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d5017d-ea2f-41fa-bc68-b8a7fdf4852c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.878586] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2332.878800] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f50587e6-16a8-4467-88b8-b9efd19abe7e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.881050] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2332.881231] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2332.882181] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3b5210a-8d82-47ff-bfdb-3a8a68c812d3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.886616] env[61215]: DEBUG oslo_vmware.api [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Waiting for the task: (returnval){ [ 2332.886616] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5252f390-cbba-0dc3-443b-6e7e710ae8af" [ 2332.886616] env[61215]: _type = "Task" [ 2332.886616] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2332.894317] env[61215]: DEBUG oslo_vmware.api [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5252f390-cbba-0dc3-443b-6e7e710ae8af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.074321] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2333.074546] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2333.074760] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Deleting the datastore file [datastore1] 38fe96cf-e570-4c0e-af6f-2f199efc06ff {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2333.075123] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9448988c-547a-4e04-a3c9-1276e444ec5c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.082088] env[61215]: DEBUG oslo_vmware.api [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Waiting for the task: (returnval){ [ 2333.082088] env[61215]: value = "task-1690431" [ 2333.082088] env[61215]: _type = "Task" [ 2333.082088] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.089798] env[61215]: DEBUG oslo_vmware.api [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Task: {'id': task-1690431, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2333.396716] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2333.397098] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Creating directory with path [datastore1] vmware_temp/7e98cb02-6021-476f-b797-fdbca08364ee/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2333.397182] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85624bdc-05fb-497b-9b75-e050982ed306 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.409143] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Created directory with path [datastore1] vmware_temp/7e98cb02-6021-476f-b797-fdbca08364ee/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2333.409345] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Fetch image to [datastore1] vmware_temp/7e98cb02-6021-476f-b797-fdbca08364ee/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2333.409521] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/7e98cb02-6021-476f-b797-fdbca08364ee/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2333.410307] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8c7fef-4f4c-49e1-81b6-5bfe7619de1b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.416999] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15124a46-c780-4413-8090-5b0bce425ffd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.425958] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db07c75d-bd53-4509-a757-1a42d7d787ad {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.456764] env[61215]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5230c540-d189-4410-9fcf-c48d3bea63bf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.462572] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5bde9daa-fdb7-4373-b918-0b62f29d818c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.489103] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2333.539715] env[61215]: DEBUG oslo_vmware.rw_handles [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e98cb02-6021-476f-b797-fdbca08364ee/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2333.601618] env[61215]: DEBUG oslo_vmware.rw_handles [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2333.601804] env[61215]: DEBUG oslo_vmware.rw_handles [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7e98cb02-6021-476f-b797-fdbca08364ee/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2333.605684] env[61215]: DEBUG oslo_vmware.api [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Task: {'id': task-1690431, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104318} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2333.605946] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2333.606193] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2333.606385] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2333.606578] env[61215]: INFO nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Took 0.74 seconds to destroy the instance on the hypervisor. [ 2333.608829] env[61215]: DEBUG nova.compute.claims [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2333.609034] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2333.609272] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2333.800112] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64879a3c-a1a2-4dff-b3d6-39b3ee5c29c2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.807732] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271cc954-0ccb-4075-b436-ea78bd7de8fa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.839804] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757a05a9-dab2-499f-bf24-8fe206cc4b68 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.846396] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fa89ccc5-3157-4e4d-aac5-4bff85ebbb47 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.858745] env[61215]: DEBUG nova.compute.provider_tree [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2333.867168] env[61215]: DEBUG nova.scheduler.client.report [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2333.881137] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.272s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.881650] env[61215]: ERROR nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2333.881650] env[61215]: Faults: ['InvalidArgument'] [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Traceback (most recent call last): [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] self.driver.spawn(context, instance, image_meta, [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] self._fetch_image_if_missing(context, vi) [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 
38fe96cf-e570-4c0e-af6f-2f199efc06ff] image_cache(vi, tmp_image_ds_loc) [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] vm_util.copy_virtual_disk( [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] session._wait_for_task(vmdk_copy_task) [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] return self.wait_for_task(task_ref) [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] return evt.wait() [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] result = hub.switch() [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] return self.greenlet.switch() [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] self.f(*self.args, **self.kw) [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] raise exceptions.translate_fault(task_info.error) [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Faults: ['InvalidArgument'] [ 2333.881650] env[61215]: ERROR nova.compute.manager [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] [ 2333.882793] env[61215]: DEBUG nova.compute.utils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2333.883712] env[61215]: DEBUG 
nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Build of instance 38fe96cf-e570-4c0e-af6f-2f199efc06ff was re-scheduled: A specified parameter was not correct: fileType [ 2333.883712] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2333.884108] env[61215]: DEBUG nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2333.884288] env[61215]: DEBUG nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2333.884461] env[61215]: DEBUG nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2333.884628] env[61215]: DEBUG nova.network.neutron [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2334.346109] env[61215]: DEBUG nova.network.neutron [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2334.359047] env[61215]: INFO nova.compute.manager [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Took 0.47 seconds to deallocate network for instance.
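[editor's note] The traceback above ends inside oslo.vmware's polling loop: _poll_task re-reads the task info on each looping-call tick and raises exceptions.translate_fault(task_info.error) once the task reaches the error state, which is how the InvalidArgument fault on fileType from the disk-copy task surfaces as a VimFaultException in nova.compute.manager and triggers the reschedule and network deallocation logged just above. A minimal, self-contained Python sketch of that pattern follows; TaskInfo, VimFault and wait_for_task here are illustrative stand-ins, not the oslo.vmware API.

    import time
    from dataclasses import dataclass, field

    class VimFault(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""
        def __init__(self, message, fault_list):
            super().__init__(message)
            self.fault_list = fault_list

    @dataclass
    class TaskInfo:
        key: str
        state: str                      # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        result: object = None
        error_message: str = ""
        faults: list = field(default_factory=list)

    def wait_for_task(poll_fn, interval=0.5):
        """Poll a task until it is terminal: print progress while it is
        queued/running, return the result on success, and raise a translated
        fault on error -- the three outcomes visible for tasks in this log."""
        while True:
            info = poll_fn()
            if info.state in ("queued", "running"):
                print(f"Task: {info.key} progress is {info.progress}%")
                time.sleep(interval)
            elif info.state == "success":
                return info.result
            else:
                raise VimFault(info.error_message, info.faults)

    # Example: a task failing the way the image-cache disk copy did.
    states = iter([
        TaskInfo("task-X", "running"),
        TaskInfo("task-X", "error",
                 error_message="A specified parameter was not correct: fileType",
                 faults=["InvalidArgument"]),
    ])
    try:
        wait_for_task(lambda: next(states), interval=0)
    except VimFault as exc:
        print(f"{exc} Faults: {exc.fault_list}")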
[ 2334.461066] env[61215]: INFO nova.scheduler.client.report [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Deleted allocations for instance 38fe96cf-e570-4c0e-af6f-2f199efc06ff [ 2334.482928] env[61215]: DEBUG oslo_concurrency.lockutils [None req-10644bc7-47e1-4605-b799-f62e6d8425f8 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Lock "38fe96cf-e570-4c0e-af6f-2f199efc06ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.250s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2334.484083] env[61215]: DEBUG oslo_concurrency.lockutils [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Lock "38fe96cf-e570-4c0e-af6f-2f199efc06ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.167s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2334.484325] env[61215]: DEBUG oslo_concurrency.lockutils [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquiring lock "38fe96cf-e570-4c0e-af6f-2f199efc06ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2334.484531] env[61215]: DEBUG oslo_concurrency.lockutils [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Lock "38fe96cf-e570-4c0e-af6f-2f199efc06ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2334.484714] env[61215]: DEBUG oslo_concurrency.lockutils [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Lock "38fe96cf-e570-4c0e-af6f-2f199efc06ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2334.486684] env[61215]: INFO nova.compute.manager [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Terminating instance [ 2334.488232] env[61215]: DEBUG oslo_concurrency.lockutils [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquiring lock "refresh_cache-38fe96cf-e570-4c0e-af6f-2f199efc06ff" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2334.488394] env[61215]: DEBUG oslo_concurrency.lockutils [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Acquired lock "refresh_cache-38fe96cf-e570-4c0e-af6f-2f199efc06ff" {{(pid=61215) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2334.488563] env[61215]: DEBUG nova.network.neutron [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2334.495395] env[61215]: DEBUG nova.compute.manager [None req-cc8fe1cb-fe62-45cb-9272-34615e688f64 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: e4fdc9ea-dad8-4422-b110-5c5aa4e25f3d] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2334.518590] env[61215]: DEBUG nova.network.neutron [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2334.522017] env[61215]: DEBUG nova.compute.manager [None req-cc8fe1cb-fe62-45cb-9272-34615e688f64 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: e4fdc9ea-dad8-4422-b110-5c5aa4e25f3d] Instance disappeared before build. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2431}} [ 2334.545207] env[61215]: DEBUG oslo_concurrency.lockutils [None req-cc8fe1cb-fe62-45cb-9272-34615e688f64 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "e4fdc9ea-dad8-4422-b110-5c5aa4e25f3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.922s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2334.555329] env[61215]: DEBUG nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2334.609467] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2334.609760] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2334.611203] env[61215]: INFO nova.compute.claims [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2334.626762] env[61215]: DEBUG nova.network.neutron [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2334.636584] env[61215]: DEBUG oslo_concurrency.lockutils [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Releasing lock "refresh_cache-38fe96cf-e570-4c0e-af6f-2f199efc06ff" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2334.637020] env[61215]: DEBUG nova.compute.manager [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2334.637316] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2334.637855] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-67ba2868-9844-4b8a-84a3-08f622a2d23e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.648015] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4603d74f-e48f-4e5a-a78e-400e2aed2261 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.679433] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 38fe96cf-e570-4c0e-af6f-2f199efc06ff could not be found. [ 2334.679640] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2334.679821] env[61215]: INFO nova.compute.manager [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2334.680098] env[61215]: DEBUG oslo.service.loopingcall [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2334.680328] env[61215]: DEBUG nova.compute.manager [-] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2334.680426] env[61215]: DEBUG nova.network.neutron [-] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2334.697200] env[61215]: DEBUG nova.network.neutron [-] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2334.704877] env[61215]: DEBUG nova.network.neutron [-] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2334.712913] env[61215]: INFO nova.compute.manager [-] [instance: 38fe96cf-e570-4c0e-af6f-2f199efc06ff] Took 0.03 seconds to deallocate network for instance. 
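[editor's note] The 'Acquiring lock ... / Lock ... acquired :: waited ... / Lock ... "released" :: held ...' triplets throughout this log (e.g. the 632.250s hold and 436.167s wait on lock "38fe96cf-..." above) are emitted by oslo.concurrency's lockutils wrapper, which times how long each caller waited for, and then held, a named lock. Below is a self-contained approximation of that behaviour, assuming a hypothetical timed_lock helper in place of the real lockutils.lock/lockutils.synchronized API.

    import threading
    import time
    from collections import defaultdict
    from contextlib import contextmanager

    _locks = defaultdict(threading.Lock)   # one shared lock object per name

    @contextmanager
    def timed_lock(name, owner):
        """Log the same acquire/wait/hold transitions seen in this log."""
        print(f'Acquiring lock "{name}" by "{owner}"')
        start = time.monotonic()
        with _locks[name]:
            waited = time.monotonic() - start
            print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
            held_start = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - held_start
                print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    # Usage mirroring the claim bookkeeping in this log: serialize access to
    # the resource tracker under the "compute_resources" lock.
    with timed_lock("compute_resources",
                    "nova.compute.resource_tracker.ResourceTracker.instance_claim"):
        pass  # instance_claim / abort_instance_claim bookkeeping runs here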
[ 2334.795343] env[61215]: DEBUG oslo_concurrency.lockutils [None req-461a8408-a21d-43e1-90cb-b10da298e015 tempest-ServerTagsTestJSON-833043055 tempest-ServerTagsTestJSON-833043055-project-member] Lock "38fe96cf-e570-4c0e-af6f-2f199efc06ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.311s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2334.812443] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae9949e-213e-4f1d-89f9-a3c20416b15f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.820554] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58475a5a-2f5c-4bf6-b7bb-591b03b2d8f0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.850691] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18e7e2a-1b8e-4fd8-8419-8e836516f8b8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.857629] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec51a6c-96c8-4890-bc1a-a28ad068e143 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.870475] env[61215]: DEBUG nova.compute.provider_tree [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2334.879725] env[61215]: DEBUG nova.scheduler.client.report [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2334.893012] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.283s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2334.893482] env[61215]: DEBUG nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2334.925881] env[61215]: DEBUG nova.compute.utils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2334.927468] env[61215]: DEBUG nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2334.927468] env[61215]: DEBUG nova.network.neutron [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2334.935090] env[61215]: DEBUG nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2334.990282] env[61215]: DEBUG nova.policy [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27ff5932b5b64df087457974b83bba92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '919d66c0b792490694750f6760a90114', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2334.996951] env[61215]: DEBUG nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2335.022328] env[61215]: DEBUG nova.virt.hardware [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2335.022576] env[61215]: DEBUG nova.virt.hardware [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2335.022738] env[61215]: DEBUG nova.virt.hardware [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2335.022925] env[61215]: DEBUG nova.virt.hardware [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2335.023089] env[61215]: DEBUG nova.virt.hardware [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2335.023246] env[61215]: DEBUG nova.virt.hardware [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2335.023455] env[61215]: DEBUG nova.virt.hardware [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2335.023617] env[61215]: DEBUG nova.virt.hardware [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2335.023784] env[61215]: DEBUG nova.virt.hardware [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 
tempest-ImagesTestJSON-1438693841-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2335.024017] env[61215]: DEBUG nova.virt.hardware [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2335.024227] env[61215]: DEBUG nova.virt.hardware [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2335.025119] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbe321f-b075-4e39-a915-5cf7a3cb6ca4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.032703] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d74706-2511-4104-ab86-27290042d3ca {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.304893] env[61215]: DEBUG nova.network.neutron [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Successfully created port: 31fc7f6b-797a-401e-b408-608777acbbb8 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2335.675546] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2335.940503] env[61215]: DEBUG nova.network.neutron [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Successfully updated port: 31fc7f6b-797a-401e-b408-608777acbbb8 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2335.954469] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "refresh_cache-799c902d-2bc1-4738-b3af-772a5feea819" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2335.954609] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquired lock "refresh_cache-799c902d-2bc1-4738-b3af-772a5feea819" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2335.954760] env[61215]: DEBUG nova.network.neutron [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2335.997529] env[61215]: 
DEBUG nova.network.neutron [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2336.173815] env[61215]: DEBUG nova.network.neutron [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Updating instance_info_cache with network_info: [{"id": "31fc7f6b-797a-401e-b408-608777acbbb8", "address": "fa:16:3e:56:a9:7b", "network": {"id": "33e707ae-ffbe-4208-a3b4-6f45d3a65a85", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2079632580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "919d66c0b792490694750f6760a90114", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31fc7f6b-79", "ovs_interfaceid": "31fc7f6b-797a-401e-b408-608777acbbb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2336.185162] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Releasing lock "refresh_cache-799c902d-2bc1-4738-b3af-772a5feea819" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2336.185442] env[61215]: DEBUG nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Instance network_info: |[{"id": "31fc7f6b-797a-401e-b408-608777acbbb8", "address": "fa:16:3e:56:a9:7b", "network": {"id": "33e707ae-ffbe-4208-a3b4-6f45d3a65a85", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2079632580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "919d66c0b792490694750f6760a90114", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31fc7f6b-79", "ovs_interfaceid": "31fc7f6b-797a-401e-b408-608777acbbb8", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2336.185857] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:a9:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1c797172-a569-458e-aeb0-3f21e589a740', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '31fc7f6b-797a-401e-b408-608777acbbb8', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2336.193703] env[61215]: DEBUG oslo.service.loopingcall [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2336.194363] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2336.194613] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b778d50b-acd4-4dc1-8fe5-78ed03eacfa6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.215397] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2336.215397] env[61215]: value = "task-1690432" [ 2336.215397] env[61215]: _type = "Task" [ 2336.215397] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2336.223024] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690432, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2336.444786] env[61215]: DEBUG nova.compute.manager [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Received event network-vif-plugged-31fc7f6b-797a-401e-b408-608777acbbb8 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2336.444952] env[61215]: DEBUG oslo_concurrency.lockutils [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] Acquiring lock "799c902d-2bc1-4738-b3af-772a5feea819-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2336.445258] env[61215]: DEBUG oslo_concurrency.lockutils [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] Lock "799c902d-2bc1-4738-b3af-772a5feea819-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2336.445406] env[61215]: DEBUG oslo_concurrency.lockutils [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] Lock "799c902d-2bc1-4738-b3af-772a5feea819-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2336.445581] env[61215]: DEBUG nova.compute.manager [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] No waiting events found dispatching network-vif-plugged-31fc7f6b-797a-401e-b408-608777acbbb8 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2336.445747] env[61215]: WARNING nova.compute.manager [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Received unexpected event network-vif-plugged-31fc7f6b-797a-401e-b408-608777acbbb8 for instance with vm_state building and task_state spawning. [ 2336.445911] env[61215]: DEBUG nova.compute.manager [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Received event network-changed-31fc7f6b-797a-401e-b408-608777acbbb8 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2336.446129] env[61215]: DEBUG nova.compute.manager [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Refreshing instance network info cache due to event network-changed-31fc7f6b-797a-401e-b408-608777acbbb8. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2336.446339] env[61215]: DEBUG oslo_concurrency.lockutils [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] Acquiring lock "refresh_cache-799c902d-2bc1-4738-b3af-772a5feea819" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2336.446482] env[61215]: DEBUG oslo_concurrency.lockutils [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] Acquired lock "refresh_cache-799c902d-2bc1-4738-b3af-772a5feea819" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2336.446643] env[61215]: DEBUG nova.network.neutron [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Refreshing network info cache for port 31fc7f6b-797a-401e-b408-608777acbbb8 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2336.726049] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690432, 'name': CreateVM_Task, 'duration_secs': 0.305763} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2336.726049] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2336.726049] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2336.726442] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2336.726658] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2336.726916] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ba85c1b-9e06-48b5-a52f-feace52e757e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.731310] env[61215]: DEBUG oslo_vmware.api [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for the task: (returnval){ [ 2336.731310] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52faa2e6-dca6-c41c-cccc-d434d61d93e2" [ 2336.731310] env[61215]: _type = "Task" [ 2336.731310] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2336.738790] env[61215]: DEBUG oslo_vmware.api [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52faa2e6-dca6-c41c-cccc-d434d61d93e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2336.754527] env[61215]: DEBUG nova.network.neutron [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Updated VIF entry in instance network info cache for port 31fc7f6b-797a-401e-b408-608777acbbb8. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2336.754853] env[61215]: DEBUG nova.network.neutron [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Updating instance_info_cache with network_info: [{"id": "31fc7f6b-797a-401e-b408-608777acbbb8", "address": "fa:16:3e:56:a9:7b", "network": {"id": "33e707ae-ffbe-4208-a3b4-6f45d3a65a85", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2079632580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "919d66c0b792490694750f6760a90114", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31fc7f6b-79", "ovs_interfaceid": "31fc7f6b-797a-401e-b408-608777acbbb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2336.765508] env[61215]: DEBUG oslo_concurrency.lockutils [req-1851f55d-1988-48b3-bf59-8a51905df68a req-e93e939a-88b3-4d60-ad33-09ed491fdf20 service nova] Releasing lock "refresh_cache-799c902d-2bc1-4738-b3af-772a5feea819" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2337.241649] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2337.241918] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2337.242153] env[61215]: 
DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2343.654053] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2344.653902] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2344.654127] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2345.654804] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2346.655057] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2346.665312] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2346.665571] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2346.665750] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2346.665902] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2346.667053] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a99a57-1165-4c28-8b45-ca7a06d708f6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.675901] 
env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a5c4b9-d27d-4b21-a11d-25c53b6e7040 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.691526] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ea5540-9b3f-4e29-b143-adc2f026d9e0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.698806] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fcdd5c3-0052-40db-bc0b-c049c943a285 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.730531] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181219MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2346.730709] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2346.730881] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2346.814585] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 0d609df2-621c-456f-b8ce-a209e9052153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2346.814585] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 66420497-c0f6-4f1d-86ee-23d53400e325 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2346.814585] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 59d93243-c15c-4554-863b-779d94b3d858 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2346.814585] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f3a3a510-a085-4388-b49d-b4371095b436 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2346.814585] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2346.814585] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb5fb791-5f62-4717-8d8f-7d56ffda15be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2346.814585] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 455e7272-f099-496f-b929-ed6fa9a0ab44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2346.814585] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 9eacbeb5-b918-4b0f-82f4-d06a037803df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2346.814585] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49ab8e42-2da3-474b-b283-9d31b089fd76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2346.814585] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 799c902d-2bc1-4738-b3af-772a5feea819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2346.829707] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02265af9-44e6-4341-ba30-be7caad7da8b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2346.840442] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 81e63102-75dc-4f4b-9b48-a63b2a9123f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2346.840739] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2346.840800] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2347.020994] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8420695-f86b-4ebb-9613-4fa15202f556 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.027894] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e52e9b1-3d71-4b6b-8984-665b78681cea {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.059474] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3005d6ed-1a99-4e26-b7f9-f86f8f7540b3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.066925] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f801a782-57e7-446e-bebb-bc439a06d08a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.080218] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2347.089658] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2347.104158] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2347.104158] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.373s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2348.104415] env[61215]: DEBUG oslo_service.periodic_task [None 
req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2348.104731] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2348.104731] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2348.127077] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2348.127275] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2348.127422] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2348.127553] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2348.127678] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2348.127806] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2348.127925] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2348.128058] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2348.128183] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2348.128302] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2348.128421] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2348.653580] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2348.653834] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2348.653988] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2381.375548] env[61215]: WARNING oslo_vmware.rw_handles [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2381.375548] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2381.375548] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2381.375548] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2381.375548] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2381.375548] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2381.375548] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2381.375548] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2381.375548] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2381.375548] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2381.375548] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2381.375548] env[61215]: ERROR oslo_vmware.rw_handles [ 2381.376356] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/7e98cb02-6021-476f-b797-fdbca08364ee/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2381.378068] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2381.378317] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Copying Virtual Disk [datastore1] vmware_temp/7e98cb02-6021-476f-b797-fdbca08364ee/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/7e98cb02-6021-476f-b797-fdbca08364ee/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2381.378608] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-861b45e2-2e0b-4db0-b995-3d3c8a6b26c0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.386575] env[61215]: DEBUG oslo_vmware.api [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Waiting for the task: (returnval){ [ 2381.386575] env[61215]: value = "task-1690433" [ 2381.386575] env[61215]: _type = "Task" [ 2381.386575] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2381.394433] env[61215]: DEBUG oslo_vmware.api [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Task: {'id': task-1690433, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2381.897539] env[61215]: DEBUG oslo_vmware.exceptions [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Fault InvalidArgument not matched. 
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2381.897850] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2381.898426] env[61215]: ERROR nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2381.898426] env[61215]: Faults: ['InvalidArgument'] [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Traceback (most recent call last): [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] yield resources [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] self.driver.spawn(context, instance, image_meta, [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] self._fetch_image_if_missing(context, vi) [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] image_cache(vi, tmp_image_ds_loc) [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] vm_util.copy_virtual_disk( [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] session._wait_for_task(vmdk_copy_task) [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] return self.wait_for_task(task_ref) [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] return evt.wait() [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] result = hub.switch() [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] return self.greenlet.switch() [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] self.f(*self.args, **self.kw) [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] raise exceptions.translate_fault(task_info.error) [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Faults: ['InvalidArgument'] [ 2381.898426] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] [ 2381.899553] env[61215]: INFO nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Terminating instance [ 2381.900305] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2381.900516] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2381.900770] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb10802d-6db2-44f1-acc2-8257fb5b0555 
{{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.902960] env[61215]: DEBUG nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2381.903189] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2381.903909] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe546f02-301f-4db3-9c21-ee05f5f7c8b2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.910562] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2381.910779] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b6e6c65b-885d-49c8-92f5-542ce92fff6d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.912883] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2381.913071] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2381.914018] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16e4c003-121f-4035-aa1a-5eb8666b2856 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.918795] env[61215]: DEBUG oslo_vmware.api [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for the task: (returnval){ [ 2381.918795] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52bc7250-6d89-a2a5-1e9f-78bc7af31ab3" [ 2381.918795] env[61215]: _type = "Task" [ 2381.918795] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2381.926470] env[61215]: DEBUG oslo_vmware.api [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52bc7250-6d89-a2a5-1e9f-78bc7af31ab3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2381.988716] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2381.988950] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2381.989163] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Deleting the datastore file [datastore1] 0d609df2-621c-456f-b8ce-a209e9052153 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2381.989406] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d134cc6-4b5d-41b4-b0a6-0675ec8965d0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.995227] env[61215]: DEBUG oslo_vmware.api [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Waiting for the task: (returnval){ [ 2381.995227] env[61215]: value = "task-1690435" [ 2381.995227] env[61215]: _type = "Task" [ 2381.995227] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2382.003730] env[61215]: DEBUG oslo_vmware.api [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Task: {'id': task-1690435, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2382.429574] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2382.429867] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Creating directory with path [datastore1] vmware_temp/f500c999-a285-4ff9-98a4-a28c080a2ae3/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2382.430131] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67e85f95-5072-48f1-b99c-47fcd87e22d8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.443046] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Created directory with path [datastore1] vmware_temp/f500c999-a285-4ff9-98a4-a28c080a2ae3/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2382.443046] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Fetch image to [datastore1] vmware_temp/f500c999-a285-4ff9-98a4-a28c080a2ae3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2382.443046] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/f500c999-a285-4ff9-98a4-a28c080a2ae3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2382.443277] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e345ed0-46b7-46c7-b058-c76e60625956 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.449947] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d9dfdd-7d3d-48fb-bb04-d4acf658323a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.458742] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a216547-e62d-4a55-9669-aaf54829b275 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.488740] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8aec4b-b126-44e9-a497-6d0b004195b7 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.494054] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3f0addb4-456e-4d2c-839a-0c51de2c5fb4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.502897] env[61215]: DEBUG oslo_vmware.api [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Task: {'id': task-1690435, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075546} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2382.503133] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2382.503336] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2382.503514] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2382.503692] env[61215]: INFO nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2382.505717] env[61215]: DEBUG nova.compute.claims [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2382.505887] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2382.506115] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2382.519563] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2382.576801] env[61215]: DEBUG oslo_vmware.rw_handles [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f500c999-a285-4ff9-98a4-a28c080a2ae3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2382.642157] env[61215]: DEBUG oslo_vmware.rw_handles [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2382.642351] env[61215]: DEBUG oslo_vmware.rw_handles [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f500c999-a285-4ff9-98a4-a28c080a2ae3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2382.738547] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07bf28da-54ac-467e-ba75-941c07d4bd1d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.746183] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37518302-58be-4c20-a8eb-7b75b6aef534 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.776320] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1b61f8-bb9a-47d2-84a7-43d035f2176d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.783509] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd485410-6cec-4d21-854d-b8414a99a333 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.796151] env[61215]: DEBUG nova.compute.provider_tree [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2382.804652] env[61215]: DEBUG nova.scheduler.client.report [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2382.818299] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.312s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2382.818810] env[61215]: ERROR nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2382.818810] env[61215]: Faults: ['InvalidArgument'] [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Traceback (most recent call last): [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] self.driver.spawn(context, instance, image_meta, [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] self._fetch_image_if_missing(context, vi) [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] image_cache(vi, tmp_image_ds_loc) [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] vm_util.copy_virtual_disk( [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] session._wait_for_task(vmdk_copy_task) [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] return self.wait_for_task(task_ref) [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] return evt.wait() [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] result = hub.switch() [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] return self.greenlet.switch() [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] self.f(*self.args, **self.kw) [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 
0d609df2-621c-456f-b8ce-a209e9052153] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] raise exceptions.translate_fault(task_info.error) [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Faults: ['InvalidArgument'] [ 2382.818810] env[61215]: ERROR nova.compute.manager [instance: 0d609df2-621c-456f-b8ce-a209e9052153] [ 2382.819947] env[61215]: DEBUG nova.compute.utils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2382.820848] env[61215]: DEBUG nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Build of instance 0d609df2-621c-456f-b8ce-a209e9052153 was re-scheduled: A specified parameter was not correct: fileType [ 2382.820848] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2382.821238] env[61215]: DEBUG nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2382.821414] env[61215]: DEBUG nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2382.821571] env[61215]: DEBUG nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2382.821757] env[61215]: DEBUG nova.network.neutron [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2383.261223] env[61215]: DEBUG nova.network.neutron [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2383.278215] env[61215]: INFO nova.compute.manager [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Took 0.46 seconds to deallocate network for instance. [ 2383.370798] env[61215]: INFO nova.scheduler.client.report [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Deleted allocations for instance 0d609df2-621c-456f-b8ce-a209e9052153 [ 2383.392445] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8205ae77-d4c7-43e7-8bed-fea0dd3d9d06 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Lock "0d609df2-621c-456f-b8ce-a209e9052153" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 586.609s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2383.393326] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e8f5f49a-d479-41f1-86b3-d6c8369076c5 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Lock "0d609df2-621c-456f-b8ce-a209e9052153" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 390.131s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2383.393595] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e8f5f49a-d479-41f1-86b3-d6c8369076c5 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Acquiring lock "0d609df2-621c-456f-b8ce-a209e9052153-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2383.393801] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e8f5f49a-d479-41f1-86b3-d6c8369076c5 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Lock 
"0d609df2-621c-456f-b8ce-a209e9052153-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2383.394000] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e8f5f49a-d479-41f1-86b3-d6c8369076c5 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Lock "0d609df2-621c-456f-b8ce-a209e9052153-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2383.396672] env[61215]: INFO nova.compute.manager [None req-e8f5f49a-d479-41f1-86b3-d6c8369076c5 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Terminating instance [ 2383.398393] env[61215]: DEBUG nova.compute.manager [None req-e8f5f49a-d479-41f1-86b3-d6c8369076c5 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2383.398589] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f5f49a-d479-41f1-86b3-d6c8369076c5 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2383.398840] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7d275e3f-7511-4279-9145-6986f237e921 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.402902] env[61215]: DEBUG nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2383.409257] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18879da-bdf9-4c6f-9116-1b1f01f295db {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.437416] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-e8f5f49a-d479-41f1-86b3-d6c8369076c5 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0d609df2-621c-456f-b8ce-a209e9052153 could not be found. 
[ 2383.437686] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f5f49a-d479-41f1-86b3-d6c8369076c5 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2383.437815] env[61215]: INFO nova.compute.manager [None req-e8f5f49a-d479-41f1-86b3-d6c8369076c5 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2383.438068] env[61215]: DEBUG oslo.service.loopingcall [None req-e8f5f49a-d479-41f1-86b3-d6c8369076c5 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2383.440198] env[61215]: DEBUG nova.compute.manager [-] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2383.440307] env[61215]: DEBUG nova.network.neutron [-] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2383.453858] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2383.454109] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2383.455584] env[61215]: INFO nova.compute.claims [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2383.466483] env[61215]: DEBUG nova.network.neutron [-] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2383.484756] env[61215]: INFO nova.compute.manager [-] [instance: 0d609df2-621c-456f-b8ce-a209e9052153] Took 0.04 seconds to deallocate network for instance. 
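Note: the "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" line above is oslo.service's looping-call machinery at work: the deallocation is wrapped in a looping call that retries until the wrapped function signals completion. The sketch below shows the primitive with the fixed-interval variant (Nova itself uses a back-off variant of the same mechanism); deallocate() is a hypothetical stand-in for the Neutron cleanup call.

    # Sketch of the oslo.service looping-call retry pattern.
    from oslo_service import loopingcall


    def deallocate():
        pass  # placeholder for neutron.deallocate_for_instance(...)


    def _deallocate_with_retries():
        try:
            deallocate()
        except Exception:
            return  # swallow the error; retry on the next tick
        # Raising LoopingCallDone stops the loop and unblocks wait().
        raise loopingcall.LoopingCallDone()


    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=2).wait()  # blocks until LoopingCallDone is raised
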
[ 2383.576358] env[61215]: DEBUG oslo_concurrency.lockutils [None req-e8f5f49a-d479-41f1-86b3-d6c8369076c5 tempest-ServersAdminNegativeTestJSON-1653120602 tempest-ServersAdminNegativeTestJSON-1653120602-project-member] Lock "0d609df2-621c-456f-b8ce-a209e9052153" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.183s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2383.636390] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3076aa3b-a165-41f9-93f7-680982ef80bb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.644399] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d29bfa-fe95-478c-9d41-b88423fb41ca {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.674181] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a20375-cf48-424f-bbf3-230e60e0d354 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.681660] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ec2643-1a53-4842-8dd2-f1f56dc3fb54 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.694158] env[61215]: DEBUG nova.compute.provider_tree [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2383.702823] env[61215]: DEBUG nova.scheduler.client.report [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2383.717638] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.263s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2383.718144] env[61215]: DEBUG nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2383.754085] env[61215]: DEBUG nova.compute.utils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2383.755596] env[61215]: DEBUG nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2383.755596] env[61215]: DEBUG nova.network.neutron [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2383.763384] env[61215]: DEBUG nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2383.809027] env[61215]: DEBUG nova.policy [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ab070511b64a4d14ad02c93184a2531a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de7bfb159ad5462ab19c5ed5c1432b16', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2383.826969] env[61215]: DEBUG nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2383.851398] env[61215]: DEBUG nova.virt.hardware [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2383.851639] env[61215]: DEBUG nova.virt.hardware [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2383.851801] env[61215]: DEBUG nova.virt.hardware [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2383.851988] env[61215]: DEBUG nova.virt.hardware [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2383.852149] env[61215]: DEBUG nova.virt.hardware [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2383.852299] env[61215]: DEBUG nova.virt.hardware [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2383.852512] env[61215]: DEBUG nova.virt.hardware [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2383.852721] env[61215]: DEBUG nova.virt.hardware [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2383.852842] env[61215]: DEBUG nova.virt.hardware [None 
req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2383.853014] env[61215]: DEBUG nova.virt.hardware [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2383.853206] env[61215]: DEBUG nova.virt.hardware [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2383.854060] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd40870d-de22-4cdb-99a0-34a9095da51f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.861829] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09368946-ad91-43cc-9cac-98f48ccfb7fd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.125702] env[61215]: DEBUG nova.network.neutron [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Successfully created port: 02f3658b-cf30-4e0a-bc03-9e1eeee2a197 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2384.845542] env[61215]: DEBUG nova.network.neutron [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Successfully updated port: 02f3658b-cf30-4e0a-bc03-9e1eeee2a197 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2384.862115] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "refresh_cache-02265af9-44e6-4341-ba30-be7caad7da8b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2384.862229] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquired lock "refresh_cache-02265af9-44e6-4341-ba30-be7caad7da8b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2384.862332] env[61215]: DEBUG nova.network.neutron [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2384.901877] env[61215]: DEBUG nova.network.neutron [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 
tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2385.059843] env[61215]: DEBUG nova.network.neutron [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Updating instance_info_cache with network_info: [{"id": "02f3658b-cf30-4e0a-bc03-9e1eeee2a197", "address": "fa:16:3e:46:a5:41", "network": {"id": "aca5b4c9-c42c-4822-82bd-5db9917c1d95", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1452760311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de7bfb159ad5462ab19c5ed5c1432b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02f3658b-cf", "ovs_interfaceid": "02f3658b-cf30-4e0a-bc03-9e1eeee2a197", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2385.070529] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Releasing lock "refresh_cache-02265af9-44e6-4341-ba30-be7caad7da8b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2385.070807] env[61215]: DEBUG nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Instance network_info: |[{"id": "02f3658b-cf30-4e0a-bc03-9e1eeee2a197", "address": "fa:16:3e:46:a5:41", "network": {"id": "aca5b4c9-c42c-4822-82bd-5db9917c1d95", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1452760311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de7bfb159ad5462ab19c5ed5c1432b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02f3658b-cf", "ovs_interfaceid": "02f3658b-cf30-4e0a-bc03-9e1eeee2a197", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2385.071260] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:a5:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37fb1918-d178-4e12-93e6-316381e78be4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02f3658b-cf30-4e0a-bc03-9e1eeee2a197', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2385.078725] env[61215]: DEBUG oslo.service.loopingcall [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2385.079205] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2385.079431] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27279371-2b49-4af0-b232-a956a2cd0d99 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.099421] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2385.099421] env[61215]: value = "task-1690436" [ 2385.099421] env[61215]: _type = "Task" [ 2385.099421] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2385.106963] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690436, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2385.369619] env[61215]: DEBUG nova.compute.manager [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Received event network-vif-plugged-02f3658b-cf30-4e0a-bc03-9e1eeee2a197 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2385.369806] env[61215]: DEBUG oslo_concurrency.lockutils [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] Acquiring lock "02265af9-44e6-4341-ba30-be7caad7da8b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2385.369930] env[61215]: DEBUG oslo_concurrency.lockutils [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] Lock "02265af9-44e6-4341-ba30-be7caad7da8b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2385.370125] env[61215]: DEBUG oslo_concurrency.lockutils [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] Lock "02265af9-44e6-4341-ba30-be7caad7da8b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2385.370282] env[61215]: DEBUG nova.compute.manager [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] No waiting events found dispatching network-vif-plugged-02f3658b-cf30-4e0a-bc03-9e1eeee2a197 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2385.370447] env[61215]: WARNING nova.compute.manager [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Received unexpected event network-vif-plugged-02f3658b-cf30-4e0a-bc03-9e1eeee2a197 for instance with vm_state building and task_state spawning. [ 2385.370613] env[61215]: DEBUG nova.compute.manager [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Received event network-changed-02f3658b-cf30-4e0a-bc03-9e1eeee2a197 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2385.370774] env[61215]: DEBUG nova.compute.manager [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Refreshing instance network info cache due to event network-changed-02f3658b-cf30-4e0a-bc03-9e1eeee2a197. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2385.370962] env[61215]: DEBUG oslo_concurrency.lockutils [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] Acquiring lock "refresh_cache-02265af9-44e6-4341-ba30-be7caad7da8b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2385.371108] env[61215]: DEBUG oslo_concurrency.lockutils [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] Acquired lock "refresh_cache-02265af9-44e6-4341-ba30-be7caad7da8b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2385.371268] env[61215]: DEBUG nova.network.neutron [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Refreshing network info cache for port 02f3658b-cf30-4e0a-bc03-9e1eeee2a197 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2385.609645] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690436, 'name': CreateVM_Task, 'duration_secs': 0.29066} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2385.609828] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2385.610769] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2385.610945] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2385.611294] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2385.611550] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cdecd54-6f2a-4340-ba3c-af33d7159447 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.616488] env[61215]: DEBUG oslo_vmware.api [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Waiting for the task: (returnval){ [ 2385.616488] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52832502-6805-2ff2-3e9a-07a8321cbf4f" [ 2385.616488] env[61215]: _type = "Task" [ 2385.616488] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2385.625472] env[61215]: DEBUG oslo_vmware.api [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52832502-6805-2ff2-3e9a-07a8321cbf4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2385.899033] env[61215]: DEBUG nova.network.neutron [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Updated VIF entry in instance network info cache for port 02f3658b-cf30-4e0a-bc03-9e1eeee2a197. {{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2385.899314] env[61215]: DEBUG nova.network.neutron [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Updating instance_info_cache with network_info: [{"id": "02f3658b-cf30-4e0a-bc03-9e1eeee2a197", "address": "fa:16:3e:46:a5:41", "network": {"id": "aca5b4c9-c42c-4822-82bd-5db9917c1d95", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1452760311-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de7bfb159ad5462ab19c5ed5c1432b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37fb1918-d178-4e12-93e6-316381e78be4", "external-id": "nsx-vlan-transportzone-763", "segmentation_id": 763, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02f3658b-cf", "ovs_interfaceid": "02f3658b-cf30-4e0a-bc03-9e1eeee2a197", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2385.908415] env[61215]: DEBUG oslo_concurrency.lockutils [req-4ad55547-83b9-4078-95e1-32e78dd104d9 req-e22a379d-39d2-4ec9-b6b7-d780f1fd3863 service nova] Releasing lock "refresh_cache-02265af9-44e6-4341-ba30-be7caad7da8b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2386.127943] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2386.128213] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2386.128414] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2397.650310] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2403.653642] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2405.655783] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2406.654343] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2407.653605] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2407.653848] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2407.665907] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2407.666149] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2407.666320] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2407.666478] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2407.668010] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36391f92-8c75-4bff-a2e7-2e88e75f9853 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.676687] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4f695d-6a42-454a-8de8-54758ddcdf37 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.690899] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315b35d8-7ad6-4594-bf95-1e8c065f8481 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.698021] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ed873c-7ea4-42a2-8c99-7b865b0eae87 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2407.726997] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181305MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2407.727176] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2407.727374] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2407.843467] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 66420497-c0f6-4f1d-86ee-23d53400e325 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.843641] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 59d93243-c15c-4554-863b-779d94b3d858 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.843773] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f3a3a510-a085-4388-b49d-b4371095b436 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.843898] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.844033] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb5fb791-5f62-4717-8d8f-7d56ffda15be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.844160] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 455e7272-f099-496f-b929-ed6fa9a0ab44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.844316] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 9eacbeb5-b918-4b0f-82f4-d06a037803df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.844453] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49ab8e42-2da3-474b-b283-9d31b089fd76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.844573] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 799c902d-2bc1-4738-b3af-772a5feea819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.845075] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02265af9-44e6-4341-ba30-be7caad7da8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2407.856136] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 81e63102-75dc-4f4b-9b48-a63b2a9123f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2407.856373] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2407.856518] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2407.872631] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing inventories for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2407.886194] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating ProviderTree inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2407.886382] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2407.897639] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing aggregate associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, aggregates: None {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2407.914782] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing trait associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2408.036354] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01a9555-ed0e-4144-b5b6-7c63792da79a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.045234] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bc36e366-d50a-4294-8338-d53753355ef1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.074074] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310e2bf4-c1e1-4ce8-a676-98e4069b32ed {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.080723] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d094b9-5f2e-4dd6-89a9-e23e3e6150e9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2408.093149] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2408.101299] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2408.114214] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2408.114380] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.387s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2408.114586] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2408.114724] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61215) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11258}} [ 2409.121078] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2409.121455] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2409.654588] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2409.655799] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2409.655799] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2409.676269] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2409.676423] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2409.676558] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2409.676685] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2409.676809] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2409.676952] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2409.677100] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2409.677223] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2409.677342] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2409.677459] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2409.677578] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2410.654823] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2411.649765] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2411.672624] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2411.672624] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11220}} [ 2411.680880] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] There are 0 instances to clean {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2412.654431] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2427.941688] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2427.942225] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Getting list of instances from cluster (obj){ [ 2427.942225] env[61215]: value = "domain-c8" [ 2427.942225] env[61215]: _type = "ClusterComputeResource" [ 2427.942225] env[61215]: } {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2427.943315] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaa0bbb2-229d-46ce-8e4e-63853a95e5ec {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.960978] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Got total of 10 instances {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2429.427972] env[61215]: WARNING oslo_vmware.rw_handles [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2429.427972] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2429.427972] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2429.427972] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2429.427972] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2429.427972] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2429.427972] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2429.427972] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2429.427972] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2429.427972] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2429.427972] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2429.427972] env[61215]: ERROR oslo_vmware.rw_handles [ 2429.428735] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/f500c999-a285-4ff9-98a4-a28c080a2ae3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2429.430541] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2429.430797] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Copying Virtual Disk [datastore1] vmware_temp/f500c999-a285-4ff9-98a4-a28c080a2ae3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/f500c999-a285-4ff9-98a4-a28c080a2ae3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2429.431087] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f4670a0-854f-487d-a4bb-8a9953fc730f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.438363] env[61215]: 
DEBUG oslo_vmware.api [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for the task: (returnval){ [ 2429.438363] env[61215]: value = "task-1690437" [ 2429.438363] env[61215]: _type = "Task" [ 2429.438363] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2429.446757] env[61215]: DEBUG oslo_vmware.api [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Task: {'id': task-1690437, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2429.948929] env[61215]: DEBUG oslo_vmware.exceptions [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2429.949251] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2429.949831] env[61215]: ERROR nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2429.949831] env[61215]: Faults: ['InvalidArgument'] [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Traceback (most recent call last): [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] yield resources [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] self.driver.spawn(context, instance, image_meta, [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] self._fetch_image_if_missing(context, vi) [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] image_cache(vi, tmp_image_ds_loc) [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] vm_util.copy_virtual_disk( [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] session._wait_for_task(vmdk_copy_task) [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] return self.wait_for_task(task_ref) [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] return evt.wait() [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] result = hub.switch() [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] return self.greenlet.switch() [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] self.f(*self.args, **self.kw) [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] raise exceptions.translate_fault(task_info.error) [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Faults: ['InvalidArgument'] [ 2429.949831] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] [ 2429.950858] env[61215]: INFO nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] 
Terminating instance [ 2429.951755] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2429.951964] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2429.952219] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6dc0942c-d500-42c3-998e-7561335beaa2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.954657] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "refresh_cache-66420497-c0f6-4f1d-86ee-23d53400e325" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2429.954818] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquired lock "refresh_cache-66420497-c0f6-4f1d-86ee-23d53400e325" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2429.954996] env[61215]: DEBUG nova.network.neutron [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2429.961919] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2429.962103] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2429.962812] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-917b684c-83e2-4c2a-88e1-35250b4b9b6b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.970523] env[61215]: DEBUG oslo_vmware.api [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for the task: (returnval){ [ 2429.970523] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52398b01-1058-437e-0da3-a3c18016157d" [ 2429.970523] env[61215]: _type = "Task" [ 2429.970523] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2429.979049] env[61215]: DEBUG oslo_vmware.api [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52398b01-1058-437e-0da3-a3c18016157d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2429.985051] env[61215]: DEBUG nova.network.neutron [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2430.046345] env[61215]: DEBUG nova.network.neutron [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2430.055286] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Releasing lock "refresh_cache-66420497-c0f6-4f1d-86ee-23d53400e325" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2430.055697] env[61215]: DEBUG nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2430.055889] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2430.056952] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c0315d-c690-48c5-82c0-f87552f8a5e2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.064819] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2430.065058] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c819700-16dc-4f02-9d01-4dee47cc6ed5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.106790] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2430.107043] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2430.107157] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Deleting the datastore file [datastore1] 66420497-c0f6-4f1d-86ee-23d53400e325 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2430.107426] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab614aaf-e50d-41f6-9312-10d1de603dcb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.114602] env[61215]: DEBUG oslo_vmware.api [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for the task: (returnval){ [ 2430.114602] env[61215]: value = "task-1690439" [ 2430.114602] env[61215]: _type = "Task" [ 2430.114602] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2430.121935] env[61215]: DEBUG oslo_vmware.api [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Task: {'id': task-1690439, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2430.481493] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2430.481926] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Creating directory with path [datastore1] vmware_temp/07321368-4982-414b-81dd-55e42878e110/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2430.481926] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40d4abf4-7629-49e3-b223-a0cc710f5897 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.493246] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Created directory with path [datastore1] vmware_temp/07321368-4982-414b-81dd-55e42878e110/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2430.493431] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Fetch image to [datastore1] vmware_temp/07321368-4982-414b-81dd-55e42878e110/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2430.493604] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/07321368-4982-414b-81dd-55e42878e110/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2430.494325] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120ddb99-a919-4c55-9b64-011f9c684713 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.500738] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aeead9e-66a4-4133-ac6e-7b54c3074dbb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.510548] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd9fcb6-ca61-4c08-8185-6ab9b7deaf04 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.540288] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e6d1ff-39e6-4695-80af-9f9c6e811329 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.545539] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a6749bb5-6a23-4584-b904-cd23d8c42569 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.566637] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2430.614784] env[61215]: DEBUG oslo_vmware.rw_handles [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/07321368-4982-414b-81dd-55e42878e110/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2430.676994] env[61215]: DEBUG oslo_vmware.api [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Task: {'id': task-1690439, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036076} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2430.678439] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2430.678642] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2430.678820] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2430.678996] env[61215]: INFO nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Took 0.62 seconds to destroy the instance on the hypervisor. [ 2430.679270] env[61215]: DEBUG oslo.service.loopingcall [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2430.679683] env[61215]: DEBUG oslo_vmware.rw_handles [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2430.679847] env[61215]: DEBUG oslo_vmware.rw_handles [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/07321368-4982-414b-81dd-55e42878e110/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2430.680135] env[61215]: DEBUG nova.compute.manager [-] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Skipping network deallocation for instance since networking was not requested. {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2430.682617] env[61215]: DEBUG nova.compute.claims [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2430.682824] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2430.683090] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2430.850769] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4710376e-8d67-456a-95e0-483a291a18b2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.858822] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7947b3-7855-4761-b346-2503c6e748f9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.889729] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee5b72d-9d3f-490c-aba3-f31485444a26 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.896878] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6911799-6d56-4a03-87b5-0eb46d017970 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.911572] env[61215]: DEBUG nova.compute.provider_tree [None 
req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2430.920601] env[61215]: DEBUG nova.scheduler.client.report [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2430.936310] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.253s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2430.936827] env[61215]: ERROR nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2430.936827] env[61215]: Faults: ['InvalidArgument'] [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Traceback (most recent call last): [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] self.driver.spawn(context, instance, image_meta, [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] self._fetch_image_if_missing(context, vi) [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] image_cache(vi, tmp_image_ds_loc) [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in 
_cache_sparse_image [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] vm_util.copy_virtual_disk( [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] session._wait_for_task(vmdk_copy_task) [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] return self.wait_for_task(task_ref) [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] return evt.wait() [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] result = hub.switch() [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] return self.greenlet.switch() [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] self.f(*self.args, **self.kw) [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] raise exceptions.translate_fault(task_info.error) [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Faults: ['InvalidArgument'] [ 2430.936827] env[61215]: ERROR nova.compute.manager [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] [ 2430.937978] env[61215]: DEBUG nova.compute.utils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2430.938946] env[61215]: DEBUG nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Build of instance 
66420497-c0f6-4f1d-86ee-23d53400e325 was re-scheduled: A specified parameter was not correct: fileType [ 2430.938946] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2430.939369] env[61215]: DEBUG nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2430.939714] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "refresh_cache-66420497-c0f6-4f1d-86ee-23d53400e325" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2430.939751] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquired lock "refresh_cache-66420497-c0f6-4f1d-86ee-23d53400e325" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2430.939899] env[61215]: DEBUG nova.network.neutron [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2430.964354] env[61215]: DEBUG nova.network.neutron [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2431.040349] env[61215]: DEBUG nova.network.neutron [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2431.049517] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Releasing lock "refresh_cache-66420497-c0f6-4f1d-86ee-23d53400e325" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2431.049765] env[61215]: DEBUG nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2431.049949] env[61215]: DEBUG nova.compute.manager [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2431.134094] env[61215]: INFO nova.scheduler.client.report [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Deleted allocations for instance 66420497-c0f6-4f1d-86ee-23d53400e325 [ 2431.153573] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a73e3d5b-b53e-4224-9cd4-0e92fdc4604d tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "66420497-c0f6-4f1d-86ee-23d53400e325" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 573.132s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2431.154677] env[61215]: DEBUG oslo_concurrency.lockutils [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "66420497-c0f6-4f1d-86ee-23d53400e325" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 180.248s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2431.154899] env[61215]: DEBUG oslo_concurrency.lockutils [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "66420497-c0f6-4f1d-86ee-23d53400e325-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2431.155122] env[61215]: DEBUG oslo_concurrency.lockutils [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "66420497-c0f6-4f1d-86ee-23d53400e325-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2431.155293] env[61215]: DEBUG oslo_concurrency.lockutils [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "66420497-c0f6-4f1d-86ee-23d53400e325-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2431.157432] env[61215]: INFO nova.compute.manager [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Terminating instance [ 2431.159079] env[61215]: DEBUG oslo_concurrency.lockutils [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "refresh_cache-66420497-c0f6-4f1d-86ee-23d53400e325" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2431.159244] env[61215]: DEBUG oslo_concurrency.lockutils [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquired lock 
"refresh_cache-66420497-c0f6-4f1d-86ee-23d53400e325" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2431.159418] env[61215]: DEBUG nova.network.neutron [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2431.168384] env[61215]: DEBUG nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2431.188223] env[61215]: DEBUG nova.network.neutron [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2431.217251] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2431.218193] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2431.219193] env[61215]: INFO nova.compute.claims [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2431.248881] env[61215]: DEBUG nova.network.neutron [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2431.257214] env[61215]: DEBUG oslo_concurrency.lockutils [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Releasing lock "refresh_cache-66420497-c0f6-4f1d-86ee-23d53400e325" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2431.257609] env[61215]: DEBUG nova.compute.manager [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2431.257805] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2431.258319] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a8d1a1d-04a7-4599-a10b-96c5967dd390 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2431.269044] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8089aacf-7165-4452-87b3-5472ffd65e37 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2431.300408] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 66420497-c0f6-4f1d-86ee-23d53400e325 could not be found. [ 2431.300628] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2431.300816] env[61215]: INFO nova.compute.manager [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2431.301083] env[61215]: DEBUG oslo.service.loopingcall [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2431.303665] env[61215]: DEBUG nova.compute.manager [-] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2431.303776] env[61215]: DEBUG nova.network.neutron [-] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2431.321336] env[61215]: DEBUG nova.network.neutron [-] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2431.350230] env[61215]: DEBUG nova.network.neutron [-] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2431.361175] env[61215]: INFO nova.compute.manager [-] [instance: 66420497-c0f6-4f1d-86ee-23d53400e325] Took 0.06 seconds to deallocate network for instance. 
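Note on the inventory payloads logged above (at 2430.920601) and below (at 2431.476167): Placement sizes this host from those records, deriving usable capacity per resource class as (total - reserved) * allocation_ratio. The node therefore offers 192 schedulable VCPUs from 48 physical cores, 196078 MB of RAM and 400 GB of disk, while max_unit caps what any single allocation may take (16 VCPUs, 173 GB of root disk). The sketch below is plain Python over the dict copied verbatim from the log; it illustrates the capacity formula and is not Nova source code.

# Sketch: capacity arithmetic behind the inventory records in this log.
# (total - reserved) * allocation_ratio is the Placement capacity
# formula; the dict is copied from the nova.scheduler.client.report
# entries above/below. Illustrative only.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    # max_unit bounds a single allocation, not the host total.
    print(f"{rc}: schedulable={capacity:g} (single allocation <= {inv['max_unit']})")

# Expected output:
# VCPU: schedulable=192 (single allocation <= 16)
# MEMORY_MB: schedulable=196078 (single allocation <= 65530)
# DISK_GB: schedulable=400 (single allocation <= 173)

This also explains why the m1.nano claim at 2431.219193 succeeds trivially: 1 VCPU, 128 MB of RAM and 1 GB of root disk (the flavor logged at 2431.609944) sit far below every cap.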
[ 2431.400301] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7e2e28-0b48-404f-8221-0773cf1db2b0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2431.410207] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30e6b73-2235-4076-b39f-3862debbb094 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2431.443612] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7831d48-3b1c-417b-a6a7-0544bd8d4539 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2431.448392] env[61215]: DEBUG oslo_concurrency.lockutils [None req-19f58ff6-2d6e-408f-9ef9-64ac5e862536 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "66420497-c0f6-4f1d-86ee-23d53400e325" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.294s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2431.454412] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1038a8-a8fc-4554-9d20-6362e6f70051 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2431.468018] env[61215]: DEBUG nova.compute.provider_tree [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2431.476167] env[61215]: DEBUG nova.scheduler.client.report [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2431.488264] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.270s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2431.488694] env[61215]: DEBUG nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Start building networks asynchronously for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2431.517510] env[61215]: DEBUG nova.compute.utils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2431.518689] env[61215]: DEBUG nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2431.518859] env[61215]: DEBUG nova.network.neutron [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2431.529034] env[61215]: DEBUG nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2431.573944] env[61215]: DEBUG nova.policy [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6578389615ad46528d49d98bf36b459a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd16229c82ee9494f9921831a13c6bf7e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2431.588165] env[61215]: DEBUG nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2431.609944] env[61215]: DEBUG nova.virt.hardware [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2431.610343] env[61215]: DEBUG nova.virt.hardware [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2431.610450] env[61215]: DEBUG nova.virt.hardware [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2431.610622] env[61215]: DEBUG nova.virt.hardware [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2431.610782] env[61215]: DEBUG nova.virt.hardware [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2431.610950] env[61215]: DEBUG nova.virt.hardware [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2431.611207] env[61215]: DEBUG nova.virt.hardware [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2431.611397] env[61215]: DEBUG nova.virt.hardware [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2431.611586] env[61215]: DEBUG nova.virt.hardware [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 
tempest-ServersTestJSON-1355600418-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2431.611856] env[61215]: DEBUG nova.virt.hardware [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2431.611964] env[61215]: DEBUG nova.virt.hardware [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2431.612869] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ca7df6-8aa2-4603-815d-b06fb61e86e8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2431.621421] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79d6666-432d-4a26-9d49-d0f8f952bd04 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2431.977863] env[61215]: DEBUG nova.network.neutron [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Successfully created port: 2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2432.609020] env[61215]: DEBUG nova.compute.manager [req-18d3729d-a07f-4f4c-98f4-738371261d46 req-f5eaadd0-843a-489f-bd83-03be92291427 service nova] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Received event network-vif-plugged-2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2432.609305] env[61215]: DEBUG oslo_concurrency.lockutils [req-18d3729d-a07f-4f4c-98f4-738371261d46 req-f5eaadd0-843a-489f-bd83-03be92291427 service nova] Acquiring lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2432.609553] env[61215]: DEBUG oslo_concurrency.lockutils [req-18d3729d-a07f-4f4c-98f4-738371261d46 req-f5eaadd0-843a-489f-bd83-03be92291427 service nova] Lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2432.609774] env[61215]: DEBUG oslo_concurrency.lockutils [req-18d3729d-a07f-4f4c-98f4-738371261d46 req-f5eaadd0-843a-489f-bd83-03be92291427 service nova] Lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2432.610012] env[61215]: DEBUG nova.compute.manager [req-18d3729d-a07f-4f4c-98f4-738371261d46 req-f5eaadd0-843a-489f-bd83-03be92291427 service nova] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] 
No waiting events found dispatching network-vif-plugged-2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2432.610662] env[61215]: WARNING nova.compute.manager [req-18d3729d-a07f-4f4c-98f4-738371261d46 req-f5eaadd0-843a-489f-bd83-03be92291427 service nova] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Received unexpected event network-vif-plugged-2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8 for instance with vm_state building and task_state spawning. [ 2432.756538] env[61215]: DEBUG nova.network.neutron [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Successfully updated port: 2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2432.772300] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "refresh_cache-81e63102-75dc-4f4b-9b48-a63b2a9123f2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2432.772439] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquired lock "refresh_cache-81e63102-75dc-4f4b-9b48-a63b2a9123f2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2432.772599] env[61215]: DEBUG nova.network.neutron [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2432.819123] env[61215]: DEBUG nova.network.neutron [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2432.988289] env[61215]: DEBUG nova.network.neutron [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Updating instance_info_cache with network_info: [{"id": "2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8", "address": "fa:16:3e:7b:b5:a9", "network": {"id": "ca8a320f-bc61-4d27-ae96-1e7235aa0925", "bridge": "br-int", "label": "tempest-ServersTestJSON-1082542131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d16229c82ee9494f9921831a13c6bf7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b0f921c-fd", "ovs_interfaceid": "2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2433.002019] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Releasing lock "refresh_cache-81e63102-75dc-4f4b-9b48-a63b2a9123f2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2433.002333] env[61215]: DEBUG nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Instance network_info: |[{"id": "2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8", "address": "fa:16:3e:7b:b5:a9", "network": {"id": "ca8a320f-bc61-4d27-ae96-1e7235aa0925", "bridge": "br-int", "label": "tempest-ServersTestJSON-1082542131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d16229c82ee9494f9921831a13c6bf7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b0f921c-fd", "ovs_interfaceid": "2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2433.002759] env[61215]: 
DEBUG nova.virt.vmwareapi.vmops [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:b5:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aef08290-001a-4ae8-aff0-1889e2211389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2433.010372] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Creating folder: Project (d16229c82ee9494f9921831a13c6bf7e). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2433.010934] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dde83633-2939-44b5-b89a-1729ff79098f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2433.021490] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Created folder: Project (d16229c82ee9494f9921831a13c6bf7e) in parent group-v352463. [ 2433.021704] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Creating folder: Instances. Parent ref: group-v352562. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2433.021934] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5440d1f-ac38-41e8-ba09-a5d792c4db28 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2433.030809] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Created folder: Instances in parent group-v352562. [ 2433.031047] env[61215]: DEBUG oslo.service.loopingcall [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2433.031231] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2433.031424] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d776a4a6-e1a6-4f91-9988-d489d3b7fc60 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2433.049892] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2433.049892] env[61215]: value = "task-1690442" [ 2433.049892] env[61215]: _type = "Task" [ 2433.049892] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2433.057338] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690442, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2433.559347] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690442, 'name': CreateVM_Task, 'duration_secs': 0.297448} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2433.559579] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2433.560268] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2433.560438] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2433.560746] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2433.560985] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28e5d415-6dff-4cad-8d8e-f327c1ceab25 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2433.565258] env[61215]: DEBUG oslo_vmware.api [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for the task: (returnval){ [ 2433.565258] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d76f3a-6f6b-5a48-0d93-40dfe6068dba" [ 2433.565258] env[61215]: _type = "Task" [ 2433.565258] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2433.572540] env[61215]: DEBUG oslo_vmware.api [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d76f3a-6f6b-5a48-0d93-40dfe6068dba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2434.076473] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2434.076836] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2434.076909] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2434.634428] env[61215]: DEBUG nova.compute.manager [req-9ac3a894-11ca-4226-9d5c-c23633d9ed17 req-d9d6a68d-c321-4487-a7a1-442536dfc6f9 service nova] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Received event network-changed-2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2434.634621] env[61215]: DEBUG nova.compute.manager [req-9ac3a894-11ca-4226-9d5c-c23633d9ed17 req-d9d6a68d-c321-4487-a7a1-442536dfc6f9 service nova] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Refreshing instance network info cache due to event network-changed-2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2434.634832] env[61215]: DEBUG oslo_concurrency.lockutils [req-9ac3a894-11ca-4226-9d5c-c23633d9ed17 req-d9d6a68d-c321-4487-a7a1-442536dfc6f9 service nova] Acquiring lock "refresh_cache-81e63102-75dc-4f4b-9b48-a63b2a9123f2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2434.634976] env[61215]: DEBUG oslo_concurrency.lockutils [req-9ac3a894-11ca-4226-9d5c-c23633d9ed17 req-d9d6a68d-c321-4487-a7a1-442536dfc6f9 service nova] Acquired lock "refresh_cache-81e63102-75dc-4f4b-9b48-a63b2a9123f2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2434.635208] env[61215]: DEBUG nova.network.neutron [req-9ac3a894-11ca-4226-9d5c-c23633d9ed17 req-d9d6a68d-c321-4487-a7a1-442536dfc6f9 service nova] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Refreshing network info cache for port 2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2434.885097] env[61215]: DEBUG nova.network.neutron [req-9ac3a894-11ca-4226-9d5c-c23633d9ed17 req-d9d6a68d-c321-4487-a7a1-442536dfc6f9 service nova] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Updated VIF entry in instance network info cache for port 2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2434.885638] env[61215]: DEBUG nova.network.neutron [req-9ac3a894-11ca-4226-9d5c-c23633d9ed17 req-d9d6a68d-c321-4487-a7a1-442536dfc6f9 service nova] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Updating instance_info_cache with network_info: [{"id": "2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8", "address": "fa:16:3e:7b:b5:a9", "network": {"id": "ca8a320f-bc61-4d27-ae96-1e7235aa0925", "bridge": "br-int", "label": "tempest-ServersTestJSON-1082542131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d16229c82ee9494f9921831a13c6bf7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b0f921c-fd", "ovs_interfaceid": "2b0f921c-fd14-4b9f-bf5b-abdf0ff9b3d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2434.895418] env[61215]: DEBUG oslo_concurrency.lockutils [req-9ac3a894-11ca-4226-9d5c-c23633d9ed17 req-d9d6a68d-c321-4487-a7a1-442536dfc6f9 service nova] Releasing lock "refresh_cache-81e63102-75dc-4f4b-9b48-a63b2a9123f2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2441.952645] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2441.974863] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Getting list of instances from cluster (obj){ [ 2441.974863] env[61215]: value = "domain-c8" [ 2441.974863] env[61215]: _type = "ClusterComputeResource" [ 2441.974863] env[61215]: } {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2441.976155] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62fee3b6-e138-493c-ac90-8e19b1392ab5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2441.993058] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Got total of 10 instances {{(pid=61215) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2441.993221] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid 59d93243-c15c-4554-863b-779d94b3d858 {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 2441.993415] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid f3a3a510-a085-4388-b49d-b4371095b436 {{(pid=61215) 
_sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 2441.993577] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid 17d70ed3-4a82-48c8-95ad-c81fb0772e42 {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 2441.993727] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid fb5fb791-5f62-4717-8d8f-7d56ffda15be {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 2441.996711] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid 455e7272-f099-496f-b929-ed6fa9a0ab44 {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 2441.996711] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid 9eacbeb5-b918-4b0f-82f4-d06a037803df {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 2441.996711] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid 49ab8e42-2da3-474b-b283-9d31b089fd76 {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 2441.996711] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid 799c902d-2bc1-4738-b3af-772a5feea819 {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 2441.996711] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid 02265af9-44e6-4341-ba30-be7caad7da8b {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 2441.996711] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Triggering sync for uuid 81e63102-75dc-4f4b-9b48-a63b2a9123f2 {{(pid=61215) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10330}} [ 2441.996711] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "59d93243-c15c-4554-863b-779d94b3d858" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.996711] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "f3a3a510-a085-4388-b49d-b4371095b436" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.996711] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.996711] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.996711] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "455e7272-f099-496f-b929-ed6fa9a0ab44" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.996711] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "9eacbeb5-b918-4b0f-82f4-d06a037803df" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.996711] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "49ab8e42-2da3-474b-b283-9d31b089fd76" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.996711] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "799c902d-2bc1-4738-b3af-772a5feea819" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.996711] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "02265af9-44e6-4341-ba30-be7caad7da8b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2441.997403] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2457.694763] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2463.654365] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2465.654654] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2467.655181] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2468.653581] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2468.653872] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2468.654072] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2468.654271] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2468.665987] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2468.666271] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2468.666407] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2468.666816] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2468.668289] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29210f47-2c09-4dd9-b5d7-0e978c8b20a9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.676621] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fb5cf4-c70d-493e-b737-c0aa9b783b68 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.690449] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd18d838-f46c-4350-a05d-177fc3d8c54d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.696676] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1f105887-0a80-4c63-bfc6-154d16f95d6c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.726794] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181313MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2468.727024] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2468.727328] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2468.801223] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 59d93243-c15c-4554-863b-779d94b3d858 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2468.801399] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance f3a3a510-a085-4388-b49d-b4371095b436 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2468.801503] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2468.801623] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb5fb791-5f62-4717-8d8f-7d56ffda15be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2468.801764] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 455e7272-f099-496f-b929-ed6fa9a0ab44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2468.801872] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 9eacbeb5-b918-4b0f-82f4-d06a037803df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2468.801986] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49ab8e42-2da3-474b-b283-9d31b089fd76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2468.802111] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 799c902d-2bc1-4738-b3af-772a5feea819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2468.802224] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02265af9-44e6-4341-ba30-be7caad7da8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2468.802336] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 81e63102-75dc-4f4b-9b48-a63b2a9123f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2468.802522] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2468.802660] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2468.923448] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61977bc5-f9d7-4f30-83f6-a09f1a7c00e2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.931434] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceef5fa2-9f03-46f1-88f9-c15e0a81b4bc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.961222] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384a2d48-21b3-4f05-bef7-5ad250ad2266 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.969252] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9b65fc-a355-4923-84a2-d84bb7d6af39 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2468.981960] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2468.989907] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2469.004784] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2469.004962] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.278s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2470.133336] env[61215]: DEBUG oslo_concurrency.lockutils [None 
req-1a94f1cd-6256-4f57-9373-a00cfc737d93 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "799c902d-2bc1-4738-b3af-772a5feea819" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2471.005655] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2471.005904] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2471.005984] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2471.026223] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2471.026382] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2471.026504] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2471.026641] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2471.026765] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2471.026888] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2471.027015] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Skipping network cache update for instance because it is Building. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2471.027325] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2471.027465] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2471.027833] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2471.027833] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2471.028229] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2478.753073] env[61215]: WARNING oslo_vmware.rw_handles [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2478.753073] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2478.753073] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2478.753073] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2478.753073] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2478.753073] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2478.753073] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2478.753073] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2478.753073] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2478.753073] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2478.753073] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2478.753073] env[61215]: ERROR oslo_vmware.rw_handles [ 2478.754193] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/07321368-4982-414b-81dd-55e42878e110/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) 
fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2478.756334] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2478.756659] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Copying Virtual Disk [datastore1] vmware_temp/07321368-4982-414b-81dd-55e42878e110/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/07321368-4982-414b-81dd-55e42878e110/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2478.757072] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f5129be-79f6-4f18-858f-5a9890e3b82f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2478.766304] env[61215]: DEBUG oslo_vmware.api [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for the task: (returnval){ [ 2478.766304] env[61215]: value = "task-1690443" [ 2478.766304] env[61215]: _type = "Task" [ 2478.766304] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2478.775518] env[61215]: DEBUG oslo_vmware.api [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Task: {'id': task-1690443, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2479.276177] env[61215]: DEBUG oslo_vmware.exceptions [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Fault InvalidArgument not matched. 
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2479.276688] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2479.277346] env[61215]: ERROR nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2479.277346] env[61215]: Faults: ['InvalidArgument'] [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] Traceback (most recent call last): [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] yield resources [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self.driver.spawn(context, instance, image_meta, [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self._fetch_image_if_missing(context, vi) [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] image_cache(vi, tmp_image_ds_loc) [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] vm_util.copy_virtual_disk( [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] session._wait_for_task(vmdk_copy_task) [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] return self.wait_for_task(task_ref) [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] return evt.wait() [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] result = hub.switch() [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] return self.greenlet.switch() [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self.f(*self.args, **self.kw) [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] raise exceptions.translate_fault(task_info.error) [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] Faults: ['InvalidArgument'] [ 2479.277346] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] [ 2479.278871] env[61215]: INFO nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Terminating instance [ 2479.279319] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2479.279534] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2479.279767] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b633703-2453-4548-b10e-d8a08dbe05a0 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.281791] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "refresh_cache-59d93243-c15c-4554-863b-779d94b3d858" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2479.281972] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquired lock "refresh_cache-59d93243-c15c-4554-863b-779d94b3d858" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2479.282164] env[61215]: DEBUG nova.network.neutron [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2479.288959] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2479.289172] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2479.290337] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56869078-0093-4386-86fd-a7cb2a1db061 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.297761] env[61215]: DEBUG oslo_vmware.api [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for the task: (returnval){ [ 2479.297761] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52c837bc-ba96-8394-750a-fd63d5ac86d4" [ 2479.297761] env[61215]: _type = "Task" [ 2479.297761] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2479.312551] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2479.312796] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Creating directory with path [datastore1] vmware_temp/2435bc27-d8fb-4e3e-89dd-e129b8e838c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2479.313022] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2781b3fa-f129-436d-bf5a-af1dfb1beeee {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.332515] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Created directory with path [datastore1] vmware_temp/2435bc27-d8fb-4e3e-89dd-e129b8e838c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2479.332711] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Fetch image to [datastore1] vmware_temp/2435bc27-d8fb-4e3e-89dd-e129b8e838c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2479.332914] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/2435bc27-d8fb-4e3e-89dd-e129b8e838c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2479.333704] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5942ec-5a43-4d26-9108-83018124323b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.340448] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce86821-62fb-45e6-a0c3-1ad66eb797a9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.349120] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387e7c0b-2153-4dc9-bbcd-3a6f0be2771d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.380640] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d670f59-1548-45f3-9f7e-c3d67c4d21f5 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.386233] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c6288a44-e2b5-4bf3-84d2-121e3865e857 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.397601] env[61215]: DEBUG nova.network.neutron [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2479.408274] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2479.457266] env[61215]: DEBUG oslo_vmware.rw_handles [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2435bc27-d8fb-4e3e-89dd-e129b8e838c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2479.513968] env[61215]: DEBUG nova.network.neutron [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2479.518053] env[61215]: DEBUG oslo_vmware.rw_handles [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2479.518053] env[61215]: DEBUG oslo_vmware.rw_handles [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2435bc27-d8fb-4e3e-89dd-e129b8e838c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2479.523513] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Releasing lock "refresh_cache-59d93243-c15c-4554-863b-779d94b3d858" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2479.523902] env[61215]: DEBUG nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2479.524126] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2479.525240] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4e286c-c803-46e3-82b5-b36d2eece22d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.533565] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2479.533748] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a76f1d6-f2a5-4e0a-9907-1e3463c8e97f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.560614] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2479.560841] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2479.561075] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Deleting the datastore file [datastore1] 59d93243-c15c-4554-863b-779d94b3d858 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2479.561317] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dfcd53e5-42b5-4bbd-b985-13df9c4eed46 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2479.567620] env[61215]: DEBUG oslo_vmware.api [None req-d21214a3-ab65-46b0-bea4-220d80d880ad 
tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for the task: (returnval){ [ 2479.567620] env[61215]: value = "task-1690445" [ 2479.567620] env[61215]: _type = "Task" [ 2479.567620] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2479.574862] env[61215]: DEBUG oslo_vmware.api [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Task: {'id': task-1690445, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2480.077181] env[61215]: DEBUG oslo_vmware.api [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Task: {'id': task-1690445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032555} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2480.077453] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2480.077649] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2480.077861] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2480.078059] env[61215]: INFO nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Took 0.55 seconds to destroy the instance on the hypervisor. [ 2480.078316] env[61215]: DEBUG oslo.service.loopingcall [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2480.078519] env[61215]: DEBUG nova.compute.manager [-] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2480.080679] env[61215]: DEBUG nova.compute.claims [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2480.080855] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2480.081091] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2480.237022] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81827c3b-cde0-4c83-9b38-b1e4eff2439b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.243863] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d04d37-6dba-4e1a-8fa9-82f746fa5c25 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.273798] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b226b2-9d56-433d-84c5-ab6898890154 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.280474] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ede3a54-09c1-49a4-9a30-5cc8d9c53221 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.293296] env[61215]: DEBUG nova.compute.provider_tree [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2480.302025] env[61215]: DEBUG nova.scheduler.client.report [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2480.317886] env[61215]: DEBUG oslo_concurrency.lockutils [None 
req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.237s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2480.318426] env[61215]: ERROR nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2480.318426] env[61215]: Faults: ['InvalidArgument'] [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] Traceback (most recent call last): [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self.driver.spawn(context, instance, image_meta, [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self._fetch_image_if_missing(context, vi) [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] image_cache(vi, tmp_image_ds_loc) [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] vm_util.copy_virtual_disk( [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] session._wait_for_task(vmdk_copy_task) [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] return self.wait_for_task(task_ref) [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] return 
evt.wait() [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] result = hub.switch() [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] return self.greenlet.switch() [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self.f(*self.args, **self.kw) [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] raise exceptions.translate_fault(task_info.error) [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] Faults: ['InvalidArgument'] [ 2480.318426] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] [ 2480.319474] env[61215]: DEBUG nova.compute.utils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2480.320509] env[61215]: DEBUG nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Build of instance 59d93243-c15c-4554-863b-779d94b3d858 was re-scheduled: A specified parameter was not correct: fileType [ 2480.320509] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2480.320904] env[61215]: DEBUG nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2480.321165] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "refresh_cache-59d93243-c15c-4554-863b-779d94b3d858" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2480.321316] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 
tempest-ServerShowV247Test-1937637954-project-member] Acquired lock "refresh_cache-59d93243-c15c-4554-863b-779d94b3d858" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2480.321479] env[61215]: DEBUG nova.network.neutron [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2480.344652] env[61215]: DEBUG nova.network.neutron [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2480.409360] env[61215]: DEBUG nova.network.neutron [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2480.418750] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Releasing lock "refresh_cache-59d93243-c15c-4554-863b-779d94b3d858" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2480.418983] env[61215]: DEBUG nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2480.419189] env[61215]: DEBUG nova.compute.manager [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2277}} [ 2480.497179] env[61215]: INFO nova.scheduler.client.report [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Deleted allocations for instance 59d93243-c15c-4554-863b-779d94b3d858 [ 2480.515879] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d21214a3-ab65-46b0-bea4-220d80d880ad tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "59d93243-c15c-4554-863b-779d94b3d858" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 622.337s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2480.516161] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "59d93243-c15c-4554-863b-779d94b3d858" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 426.366s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2480.516381] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "59d93243-c15c-4554-863b-779d94b3d858-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2480.516587] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "59d93243-c15c-4554-863b-779d94b3d858-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2480.516760] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "59d93243-c15c-4554-863b-779d94b3d858-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2480.518602] env[61215]: INFO nova.compute.manager [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Terminating instance [ 2480.520160] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquiring lock "refresh_cache-59d93243-c15c-4554-863b-779d94b3d858" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2480.520324] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Acquired lock 
"refresh_cache-59d93243-c15c-4554-863b-779d94b3d858" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2480.520494] env[61215]: DEBUG nova.network.neutron [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2480.547765] env[61215]: DEBUG nova.network.neutron [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2480.604984] env[61215]: DEBUG nova.network.neutron [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2480.613584] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Releasing lock "refresh_cache-59d93243-c15c-4554-863b-779d94b3d858" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2480.614074] env[61215]: DEBUG nova.compute.manager [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2480.614257] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2480.614806] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2b79ea2a-1e1e-4553-a88d-433f4704ce42 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.624932] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d155b148-5e0f-477b-8de7-6633ad27fabc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2480.653157] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 59d93243-c15c-4554-863b-779d94b3d858 could not be found. 
[ 2480.653359] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2480.653545] env[61215]: INFO nova.compute.manager [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2480.653796] env[61215]: DEBUG oslo.service.loopingcall [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2480.654062] env[61215]: DEBUG nova.compute.manager [-] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2480.654138] env[61215]: DEBUG nova.network.neutron [-] [instance: 59d93243-c15c-4554-863b-779d94b3d858] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2480.754749] env[61215]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61215) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2480.755017] env[61215]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
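The traceback reproduced below walks through the wrapper in nova/network/neutron.py (the frame at line 212) that converts neutronclient's Unauthorized on an admin-credentialed call into NeutronAdminCredentialConfigurationInvalid. A sketch of that translation pattern follows; both exception classes here are local stand-ins, not the real imports:

```python
import functools


class Unauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized."""


class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for the nova.exception class of the same name."""


def translate_unauthorized(func):
    """Re-raise a Neutron 401 on an admin call as a configuration error."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Unauthorized:
            # A 401 while using admin credentials points at bad [neutron]
            # auth settings in nova.conf, not a caller permission problem.
            raise NeutronAdminCredentialConfigurationInvalid()
    return wrapper


@translate_unauthorized
def list_ports(**search_opts):
    # Simulates the failing Neutron call seen in the log.
    raise Unauthorized("The request you have made requires authentication.")
```

Calling list_ports() here raises NeutronAdminCredentialConfigurationInvalid, which is the exception type that surfaces in the records below.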
[ 2480.755557] env[61215]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-8dad50e6-1b05-48e0-90a3-c930d8346674'] [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2480.755557] env[61215]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2480.755557] env[61215]: ERROR oslo.service.loopingcall [ 2480.757177] env[61215]: ERROR nova.compute.manager [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2480.789483] env[61215]: ERROR nova.compute.manager [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
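The looping-call frames above (and the compute-manager traceback that follows) show _deallocate_network_with_retries wrapping deallocation in a RetryDecorator-driven call. A plain-Python analogue of that behaviour, offered as a sketch rather than the oslo.service API: only exception types registered as retryable are retried with growing sleeps, so the credential error escapes on the first attempt.

```python
import time


def retry_call(func, retryable, max_retries=3, base_sleep=1.0):
    """Call func(); retry only the listed exception types, with backoff."""
    attempt = 0
    while True:
        try:
            return func()
        except retryable:
            attempt += 1
            if attempt > max_retries:
                raise
            time.sleep(base_sleep * attempt)  # incremental backoff


class TransientError(Exception):
    """Hypothetical retryable fault."""


def always_unauthorized():
    # Stands in for the non-retryable credential failure in the log.
    raise ValueError("credential error: not retryable here")


# Exceptions outside `retryable` propagate immediately, which is why the
# unauthorized error above failed the looping call on its first run.
try:
    retry_call(always_unauthorized, retryable=TransientError)
except ValueError:
    pass
```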
[ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] Traceback (most recent call last): [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] ret = obj(*args, **kwargs) [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] exception_handler_v20(status_code, error_body) [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] raise client_exc(message=error_message, [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] Neutron server returns request_ids: ['req-8dad50e6-1b05-48e0-90a3-c930d8346674'] [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] During handling of the above exception, another exception occurred: [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] Traceback (most recent call last): [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self._delete_instance(context, instance, bdms) [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self._shutdown_instance(context, instance, bdms) [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self._try_deallocate_network(context, instance, requested_networks) [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] with excutils.save_and_reraise_exception(): [ 2480.789483] env[61215]: ERROR 
nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self.force_reraise() [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] raise self.value [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] _deallocate_network_with_retries() [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] return evt.wait() [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] result = hub.switch() [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] return self.greenlet.switch() [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] result = func(*self.args, **self.kw) [ 2480.789483] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] result = f(*args, **kwargs) [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self._deallocate_network( [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self.network_api.deallocate_for_instance( [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 
59d93243-c15c-4554-863b-779d94b3d858] data = neutron.list_ports(**search_opts) [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] ret = obj(*args, **kwargs) [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] return self.list('ports', self.ports_path, retrieve_all, [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] ret = obj(*args, **kwargs) [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] for r in self._pagination(collection, path, **params): [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] res = self.get(path, params=params) [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] ret = obj(*args, **kwargs) [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] return self.retry_request("GET", action, body=body, [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] ret = obj(*args, **kwargs) [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] return self.do_request(method, action, body=body, [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] ret = obj(*args, **kwargs) [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] self._handle_fault_response(status_code, replybody, resp) [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2480.790873] env[61215]: ERROR nova.compute.manager [instance: 59d93243-c15c-4554-863b-779d94b3d858] [ 2480.817539] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Lock "59d93243-c15c-4554-863b-779d94b3d858" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.301s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2480.818749] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "59d93243-c15c-4554-863b-779d94b3d858" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 38.824s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2480.818994] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 59d93243-c15c-4554-863b-779d94b3d858] During sync_power_state the instance has a pending task (deleting). Skip. [ 2480.819198] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "59d93243-c15c-4554-863b-779d94b3d858" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2480.866185] env[61215]: INFO nova.compute.manager [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] [instance: 59d93243-c15c-4554-863b-779d94b3d858] Successfully reverted task state from None on failure for instance. [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server [None req-ddcb96b7-24f1-454b-b3f3-62f8319bc4b6 tempest-ServerShowV247Test-1937637954 tempest-ServerShowV247Test-1937637954-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-8dad50e6-1b05-48e0-90a3-c930d8346674'] [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 2480.869589] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.871554] env[61215]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2480.871554] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2480.873275] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2480.873275] env[61215]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2480.873275] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2480.873275] env[61215]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2480.873275] env[61215]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
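The two tracebacks above are one failure reported twice: first by nova.compute.manager against the instance, then by the oslo_messaging RPC server that dispatched terminate_instance. The root cause is the Neutron 401: while deallocating networking for the delete, the wrapper at nova/network/neutron.py lines 196/212 calls through to neutronclient, catches its Unauthorized, and re-raises it as nova.exception.NeutronAdminCredentialConfigurationInvalid, which aborts the delete and causes the task-state revert logged above. A minimal sketch of that translate-on-unauthorized pattern; the stub exception classes stand in for the neutronclient and nova.exception types, and the code is illustrative rather than Nova's actual implementation:

    import functools

    # Stand-ins for neutronclient.common.exceptions.Unauthorized and
    # nova.exception.NeutronAdminCredentialConfigurationInvalid.
    class Unauthorized(Exception):
        pass

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        pass

    def translate_unauthorized(func):
        """Re-raise a client-level 401 as a deployment-level config error."""
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Unauthorized as exc:
                # Retrying cannot help when the service credentials are bad,
                # so surface a configuration error instead of the raw 401.
                raise NeutronAdminCredentialConfigurationInvalid() from exc
        return wrapper

    @translate_unauthorized
    def list_ports(**search_opts):
        raise Unauthorized("401: The request you have made requires authentication.")

    try:
        list_ports(device_id="59d93243")
    except NeutronAdminCredentialConfigurationInvalid as e:
        print("translated:", type(e.__cause__).__name__, "->", type(e).__name__)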
[ 2480.873275] env[61215]: ERROR oslo_messaging.rpc.server [ 2483.344136] env[61215]: DEBUG oslo_concurrency.lockutils [None req-43615b5e-2186-4b3e-822c-99e4b93bd56e tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "02265af9-44e6-4341-ba30-be7caad7da8b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2483.842398] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "e13fe4b8-f445-46f6-a896-8db6fd85fa71" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2483.842627] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "e13fe4b8-f445-46f6-a896-8db6fd85fa71" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2483.853020] env[61215]: DEBUG nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2483.901735] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2483.902048] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2483.903445] env[61215]: INFO nova.compute.claims [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2484.059983] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61110fd0-2ab6-49ed-aff1-a191dc09f5f3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.067271] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139b748e-a240-4d93-8d7b-9e82fdf70cf0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.097643] env[61215]: DEBUG
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eaaf729-2fed-4558-9727-bdeeb2c5ef02 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.104653] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db259a1b-340d-4941-b92f-86200ea07cfa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.117362] env[61215]: DEBUG nova.compute.provider_tree [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2484.127640] env[61215]: DEBUG nova.scheduler.client.report [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2484.142492] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.240s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2484.142990] env[61215]: DEBUG nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2484.174706] env[61215]: DEBUG nova.compute.utils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2484.176531] env[61215]: DEBUG nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Allocating IP information in the background. 
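The inventory record above carries enough to reproduce the capacity figures Placement schedules against: for each resource class the usable amount is (total - reserved) * allocation_ratio. A quick worked check of the numbers, with the values copied straight from the record:

    # Effective capacity per resource class, as reported for provider
    # 1329e087-aa78-44a2-9687-63a2b1b33fd5 in the record above.
    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:g} schedulable")
    # VCPU: 192   MEMORY_MB: 196078   DISK_GB: 400

The 4.0 VCPU allocation ratio is what lets 48 physical cores back 192 vCPUs of claims, which is why the m1.nano claim above succeeds without touching physical headroom.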
{{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2484.176721] env[61215]: DEBUG nova.network.neutron [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2484.187199] env[61215]: DEBUG nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2484.236078] env[61215]: DEBUG nova.policy [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9db5492250b426c80f611d7a5686c1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3eac98da0cb41cbad12d92e9151b143', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2484.247011] env[61215]: DEBUG nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Start spawning the instance on the hypervisor. 
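The nova.policy record above is a soft denial: the token carries only the reader and member roles, so the network:attach_external_network check fails and the build simply continues on the tenant network. A minimal role-gate sketch of that kind of check; this is not the oslo.policy engine, and the admin-only default for this rule is an assumption based on Nova's stock policy:

    # Hypothetical role gate approximating the failed policy check above.
    def authorize(rule, credentials, required_roles=("admin",)):
        roles = set(credentials.get("roles", ()))
        if credentials.get("is_admin") or roles & set(required_roles):
            return True
        print(f"Policy check for {rule} failed with roles {sorted(roles)}")
        return False

    creds = {"is_admin": False, "roles": ["reader", "member"]}
    assert not authorize("network:attach_external_network", creds)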
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2484.275474] env[61215]: DEBUG nova.virt.hardware [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2484.275756] env[61215]: DEBUG nova.virt.hardware [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2484.275946] env[61215]: DEBUG nova.virt.hardware [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2484.276161] env[61215]: DEBUG nova.virt.hardware [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2484.276316] env[61215]: DEBUG nova.virt.hardware [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2484.276467] env[61215]: DEBUG nova.virt.hardware [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2484.276677] env[61215]: DEBUG nova.virt.hardware [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2484.276839] env[61215]: DEBUG nova.virt.hardware [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2484.277016] env[61215]: DEBUG nova.virt.hardware [None 
req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2484.277186] env[61215]: DEBUG nova.virt.hardware [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2484.277362] env[61215]: DEBUG nova.virt.hardware [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2484.278276] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e870d5e5-e349-409d-9d43-305e114a6cee {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.287048] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4818925-a6b4-46ec-adba-a2f2be9b1f0e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2484.573239] env[61215]: DEBUG nova.network.neutron [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Successfully created port: 1c01d626-4f07-4bbc-a521-fda30b3e698d {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2485.371120] env[61215]: DEBUG nova.compute.manager [req-3f65c733-d9b7-4e85-93a6-4e89432afce5 req-a975edba-2ae0-4dbe-a9ec-db9a8e5b8f48 service nova] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Received event network-vif-plugged-1c01d626-4f07-4bbc-a521-fda30b3e698d {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2485.371360] env[61215]: DEBUG oslo_concurrency.lockutils [req-3f65c733-d9b7-4e85-93a6-4e89432afce5 req-a975edba-2ae0-4dbe-a9ec-db9a8e5b8f48 service nova] Acquiring lock "e13fe4b8-f445-46f6-a896-8db6fd85fa71-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2485.371575] env[61215]: DEBUG oslo_concurrency.lockutils [req-3f65c733-d9b7-4e85-93a6-4e89432afce5 req-a975edba-2ae0-4dbe-a9ec-db9a8e5b8f48 service nova] Lock "e13fe4b8-f445-46f6-a896-8db6fd85fa71-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2485.371749] env[61215]: DEBUG oslo_concurrency.lockutils [req-3f65c733-d9b7-4e85-93a6-4e89432afce5 req-a975edba-2ae0-4dbe-a9ec-db9a8e5b8f48 service nova] Lock "e13fe4b8-f445-46f6-a896-8db6fd85fa71-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2485.371923] env[61215]: DEBUG nova.compute.manager
[req-3f65c733-d9b7-4e85-93a6-4e89432afce5 req-a975edba-2ae0-4dbe-a9ec-db9a8e5b8f48 service nova] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] No waiting events found dispatching network-vif-plugged-1c01d626-4f07-4bbc-a521-fda30b3e698d {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2485.372108] env[61215]: WARNING nova.compute.manager [req-3f65c733-d9b7-4e85-93a6-4e89432afce5 req-a975edba-2ae0-4dbe-a9ec-db9a8e5b8f48 service nova] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Received unexpected event network-vif-plugged-1c01d626-4f07-4bbc-a521-fda30b3e698d for instance with vm_state building and task_state spawning. [ 2485.459550] env[61215]: DEBUG nova.network.neutron [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Successfully updated port: 1c01d626-4f07-4bbc-a521-fda30b3e698d {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2485.472515] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "refresh_cache-e13fe4b8-f445-46f6-a896-8db6fd85fa71" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2485.472656] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "refresh_cache-e13fe4b8-f445-46f6-a896-8db6fd85fa71" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2485.472798] env[61215]: DEBUG nova.network.neutron [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2485.514086] env[61215]: DEBUG nova.network.neutron [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Instance cache missing network info. 
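The WARNING above ("Received unexpected event ... vm_state building and task_state spawning") is the usual benign race: Neutron delivered network-vif-plugged before the VMware driver registered a waiter for it, so pop_instance_event found nothing to wake and the event was dropped. A sketch of that prepare/pop coordination; threading.Event stands in for the eventlet primitives Nova actually uses, and the names mirror the log only for readability:

    import threading
    from collections import defaultdict

    _waiters = defaultdict(dict)   # instance uuid -> event name -> Event
    _lock = threading.Lock()

    def prepare_for_event(instance, name):
        """Driver side: register interest before triggering the plug."""
        with _lock:
            event = threading.Event()
            _waiters[instance][name] = event
            return event

    def pop_instance_event(instance, name):
        with _lock:
            return _waiters[instance].pop(name, None)

    def external_instance_event(instance, name):
        """Callback side: wake the waiter, or warn if none is registered."""
        event = pop_instance_event(instance, name)
        if event is None:
            print(f"Received unexpected event {name} for instance {instance}")
        else:
            event.set()

    # The arrival order seen in the log: the event lands before prepare ran.
    external_instance_event("e13fe4b8", "network-vif-plugged-1c01d626")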
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2485.707637] env[61215]: DEBUG nova.network.neutron [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Updating instance_info_cache with network_info: [{"id": "1c01d626-4f07-4bbc-a521-fda30b3e698d", "address": "fa:16:3e:f4:29:48", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c01d626-4f", "ovs_interfaceid": "1c01d626-4f07-4bbc-a521-fda30b3e698d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2485.719246] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "refresh_cache-e13fe4b8-f445-46f6-a896-8db6fd85fa71" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2485.719541] env[61215]: DEBUG nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Instance network_info: |[{"id": "1c01d626-4f07-4bbc-a521-fda30b3e698d", "address": "fa:16:3e:f4:29:48", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c01d626-4f", "ovs_interfaceid": "1c01d626-4f07-4bbc-a521-fda30b3e698d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2485.719950] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:29:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c01d626-4f07-4bbc-a521-fda30b3e698d', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2485.727872] env[61215]: DEBUG oslo.service.loopingcall [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2485.728131] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2485.728367] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-007bfc18-7235-4224-b107-a8284faca3c8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2485.749013] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2485.749013] env[61215]: value = "task-1690446" [ 2485.749013] env[61215]: _type = "Task" [ 2485.749013] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2485.757361] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690446, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2486.261405] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690446, 'name': CreateVM_Task, 'duration_secs': 0.36777} completed successfully. 
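The task lines above ("progress is 0%" ... "completed successfully") come from oslo.vmware polling the asynchronous CreateVM_Task until vCenter reports a terminal state. A fixed-interval polling sketch of the same shape; StubTask and its state dictionary are invented for the example, and the real client additionally translates vSphere faults, as the tracebacks elsewhere in this log show:

    import time

    class StubTask:
        """Invented stand-in for a vSphere task; succeeds on the third poll."""
        def __init__(self):
            self._polls = 0

        def poll(self):
            self._polls += 1
            if self._polls < 3:
                return {"state": "running", "progress": self._polls * 30}
            return {"state": "success", "progress": 100}

    def wait_for_task(task, interval=0.5):
        while True:
            info = task.poll()
            print(f"progress is {info['progress']}%")
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)

    wait_for_task(StubTask(), interval=0.01)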
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2486.261662] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2486.262159] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2486.262330] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2486.262648] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2486.262887] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-747636d0-0f2f-4783-b3dd-8bc9e02665a0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2486.267239] env[61215]: DEBUG oslo_vmware.api [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 2486.267239] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52be2010-3ca4-0bbf-2b25-40b3459fbdbe" [ 2486.267239] env[61215]: _type = "Task" [ 2486.267239] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2486.274310] env[61215]: DEBUG oslo_vmware.api [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52be2010-3ca4-0bbf-2b25-40b3459fbdbe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2486.777986] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2486.778318] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2486.778450] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2487.399430] env[61215]: DEBUG nova.compute.manager [req-0af5e6ba-9ba4-463c-9139-4593c750ee84 req-32ce5fc3-94bd-4d3a-857c-d5678803b946 service nova] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Received event network-changed-1c01d626-4f07-4bbc-a521-fda30b3e698d {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2487.399636] env[61215]: DEBUG nova.compute.manager [req-0af5e6ba-9ba4-463c-9139-4593c750ee84 req-32ce5fc3-94bd-4d3a-857c-d5678803b946 service nova] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Refreshing instance network info cache due to event network-changed-1c01d626-4f07-4bbc-a521-fda30b3e698d. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2487.399862] env[61215]: DEBUG oslo_concurrency.lockutils [req-0af5e6ba-9ba4-463c-9139-4593c750ee84 req-32ce5fc3-94bd-4d3a-857c-d5678803b946 service nova] Acquiring lock "refresh_cache-e13fe4b8-f445-46f6-a896-8db6fd85fa71" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2487.400015] env[61215]: DEBUG oslo_concurrency.lockutils [req-0af5e6ba-9ba4-463c-9139-4593c750ee84 req-32ce5fc3-94bd-4d3a-857c-d5678803b946 service nova] Acquired lock "refresh_cache-e13fe4b8-f445-46f6-a896-8db6fd85fa71" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2487.400222] env[61215]: DEBUG nova.network.neutron [req-0af5e6ba-9ba4-463c-9139-4593c750ee84 req-32ce5fc3-94bd-4d3a-857c-d5678803b946 service nova] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Refreshing network info cache for port 1c01d626-4f07-4bbc-a521-fda30b3e698d {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2487.680350] env[61215]: DEBUG nova.network.neutron [req-0af5e6ba-9ba4-463c-9139-4593c750ee84 req-32ce5fc3-94bd-4d3a-857c-d5678803b946 service nova] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Updated VIF entry in instance network info cache for port 1c01d626-4f07-4bbc-a521-fda30b3e698d. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2487.680687] env[61215]: DEBUG nova.network.neutron [req-0af5e6ba-9ba4-463c-9139-4593c750ee84 req-32ce5fc3-94bd-4d3a-857c-d5678803b946 service nova] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Updating instance_info_cache with network_info: [{"id": "1c01d626-4f07-4bbc-a521-fda30b3e698d", "address": "fa:16:3e:f4:29:48", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c01d626-4f", "ovs_interfaceid": "1c01d626-4f07-4bbc-a521-fda30b3e698d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2487.689783] env[61215]: DEBUG oslo_concurrency.lockutils [req-0af5e6ba-9ba4-463c-9139-4593c750ee84 req-32ce5fc3-94bd-4d3a-857c-d5678803b946 service nova] Releasing lock "refresh_cache-e13fe4b8-f445-46f6-a896-8db6fd85fa71" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2517.672129] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2523.657103] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2524.937094] env[61215]: DEBUG oslo_concurrency.lockutils [None req-72b6c2ba-30e2-48ef-9935-532d1f67ab15 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2525.654060] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2527.630223] env[61215]: WARNING oslo_vmware.rw_handles [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end
closed connection without response [ 2527.630223] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2527.630223] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2527.630223] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2527.630223] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2527.630223] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2527.630223] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2527.630223] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2527.630223] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2527.630223] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2527.630223] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2527.630223] env[61215]: ERROR oslo_vmware.rw_handles [ 2527.630887] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/2435bc27-d8fb-4e3e-89dd-e129b8e838c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2527.632710] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2527.632954] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Copying Virtual Disk [datastore1] vmware_temp/2435bc27-d8fb-4e3e-89dd-e129b8e838c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/2435bc27-d8fb-4e3e-89dd-e129b8e838c3/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2527.633248] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bcf282b7-ad8b-462c-953a-52db61044d88 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2527.640864] env[61215]: DEBUG oslo_vmware.api [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for the task: (returnval){ [ 2527.640864] env[61215]: value = "task-1690447" [ 2527.640864] env[61215]: _type = "Task" [ 2527.640864] env[61215]: } to complete. 
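Two distinct problems are interleaved above. The WARNING is recoverable: when oslo_vmware.rw_handles closes the image-transfer connection, the datastore side hangs up without sending a final HTTP response, getresponse() raises RemoteDisconnected, and the handler logs it while the downloaded data is already on datastore1 (the DEBUG line that follows confirms this). A tolerant-close sketch of that pattern using only the standard library; the function name and logger are illustrative:

    import http.client
    import logging

    LOG = logging.getLogger(__name__)

    def close_write_handle(conn: http.client.HTTPConnection) -> None:
        """Drain the server's final reply, tolerating an abrupt hang-up."""
        try:
            conn.getresponse().read()
        except http.client.RemoteDisconnected:
            # The transfer itself already completed; the server just closed
            # the socket without a response, so warn and move on.
            LOG.warning("Error occurred while reading the HTTP response.",
                        exc_info=True)
        finally:
            conn.close()

The CopyVirtualDisk_Task failure that follows ("A specified parameter was not correct: fileType") is the fatal one; it is what ultimately fails the spawn in the traceback below and leads to the instance being terminated.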
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2527.649981] env[61215]: DEBUG oslo_vmware.api [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': task-1690447, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2528.151044] env[61215]: DEBUG oslo_vmware.exceptions [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2528.151382] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2528.151945] env[61215]: ERROR nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2528.151945] env[61215]: Faults: ['InvalidArgument'] [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] Traceback (most recent call last): [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] yield resources [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] self.driver.spawn(context, instance, image_meta, [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] self._fetch_image_if_missing(context, vi) [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] image_cache(vi, tmp_image_ds_loc) [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] vm_util.copy_virtual_disk( [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] session._wait_for_task(vmdk_copy_task) [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] return self.wait_for_task(task_ref) [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] return evt.wait() [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] result = hub.switch() [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] return self.greenlet.switch() [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] self.f(*self.args, **self.kw) [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] raise exceptions.translate_fault(task_info.error) [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] Faults: ['InvalidArgument'] [ 2528.151945] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] [ 2528.153132] env[61215]: INFO nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Terminating instance [ 2528.153974] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2528.154147] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2528.154401] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-135440a9-5118-4279-8f98-0c6bf73059e1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.157101] env[61215]: DEBUG nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2528.157101] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2528.157802] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946df084-b097-41f6-9a93-1d6bd271b593 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.165101] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2528.166163] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20a55068-628f-4ed4-a807-ab76ef17bf5f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.167615] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2528.167796] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Folder [datastore1] devstack-image-cache_base created. 
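[editor's note] The MakeDirectory / "Folder [datastore1] devstack-image-cache_base created" pair above is an idempotent create-if-missing: the driver issues the mkdir and treats an already-exists fault as success. A local-filesystem analogy of the same pattern, assuming nothing about the driver's internals (pathlib here is only an illustration; the real call goes to the vSphere FileManager, not the local disk):

    from pathlib import Path

    def create_folder_if_missing(base: str, rel: str) -> Path:
        """Idempotent directory creation: succeed whether or not the
        directory already exists, like the mkdir + already-exists
        fault swallow seen in the records above."""
        target = Path(base) / rel
        target.mkdir(parents=True, exist_ok=True)   # no error if already present
        return target

    cache = create_folder_if_missing("/tmp/datastore1", "devstack-image-cache_base")
    print(f"Folder {cache} created (or already present).")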
{{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2528.168498] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85ba3723-f296-4ff6-9a35-99a4aac30d8c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.174163] env[61215]: DEBUG oslo_vmware.api [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Waiting for the task: (returnval){ [ 2528.174163] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52052ccd-1cce-4d3e-dec7-40ef0d88c42c" [ 2528.174163] env[61215]: _type = "Task" [ 2528.174163] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2528.184671] env[61215]: DEBUG oslo_vmware.api [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52052ccd-1cce-4d3e-dec7-40ef0d88c42c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2528.242504] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2528.242778] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2528.243009] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Deleting the datastore file [datastore1] f3a3a510-a085-4388-b49d-b4371095b436 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2528.243314] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1d2c35d-7c17-4898-9459-dabc9b0f705b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.250029] env[61215]: DEBUG oslo_vmware.api [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for the task: (returnval){ [ 2528.250029] env[61215]: value = "task-1690449" [ 2528.250029] env[61215]: _type = "Task" [ 2528.250029] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2528.257471] env[61215]: DEBUG oslo_vmware.api [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': task-1690449, 'name': DeleteDatastoreFile_Task} progress is 0%. 
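[editor's note] Paths throughout these records use the vSphere "[datastore] relative/path" notation ("[datastore1] devstack-image-cache_base/...", "[datastore1] f3a3a510-..."). A small hedged parser/formatter for that string form; the driver has a richer datastore-path helper, and this sketch only covers the notation visible in the log:

    import re
    from dataclasses import dataclass

    @dataclass(frozen=True)
    class DsPath:
        datastore: str
        rel_path: str

        def __str__(self) -> str:
            return f"[{self.datastore}] {self.rel_path}"

        @classmethod
        def parse(cls, text: str) -> "DsPath":
            m = re.fullmatch(r"\[([^\]]+)\]\s*(.*)", text)
            if not m:
                raise ValueError(f"not a datastore path: {text!r}")
            return cls(m.group(1), m.group(2))

    p = DsPath.parse("[datastore1] devstack-image-cache_base/"
                     "e91f0c25-9ff9-4937-8440-f47cfb2028bc/"
                     "e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk")
    assert p.datastore == "datastore1"
    assert str(p).startswith("[datastore1] devstack-image-cache_base/")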
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2528.654546] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2528.654900] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2528.654962] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2528.655086] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2528.683557] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2528.683798] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Creating directory with path [datastore1] vmware_temp/390c3acf-c3c5-4a67-94c0-cabe58ce4ced/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2528.684034] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-211d142b-3464-46a5-a024-03a164f62c35 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.695364] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Created directory with path [datastore1] vmware_temp/390c3acf-c3c5-4a67-94c0-cabe58ce4ced/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2528.695511] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Fetch image to [datastore1] vmware_temp/390c3acf-c3c5-4a67-94c0-cabe58ce4ced/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2528.695916] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] 
vmware_temp/390c3acf-c3c5-4a67-94c0-cabe58ce4ced/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2528.696396] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1978cc78-ef45-4c17-82b2-03c5d3b0fad9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.702800] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2948b775-f22e-4df9-94f6-89cec689499a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.711434] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88adba70-7f01-4cba-943e-a39f344a3cf5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.741094] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06243266-1934-426c-8568-5bd81067429d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.746468] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-79d1d1f5-1703-4efb-b810-325c3115a75d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2528.758285] env[61215]: DEBUG oslo_vmware.api [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': task-1690449, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078267} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2528.758471] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2528.758649] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2528.758822] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2528.758997] env[61215]: INFO nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Took 0.60 seconds to destroy the instance on the hypervisor. 
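[editor's note] The destroy path that just completed runs in a fixed order: unregister the VM, delete its datastore directory, then report the wall-clock duration ("Took 0.60 seconds to destroy the instance on the hypervisor."). A hedged outline of that sequence; unregister_vm and delete_datastore_file are hypothetical stand-ins for the UnregisterVM and DeleteDatastoreFile_Task calls seen above:

    import logging
    import time

    LOG = logging.getLogger(__name__)

    def unregister_vm(vm_ref):            # hypothetical stand-in
        raise NotImplementedError

    def delete_datastore_file(path):      # hypothetical stand-in
        raise NotImplementedError

    def destroy_instance(vm_ref, ds_dir):
        """Unregister, delete contents, and time the whole teardown."""
        start = time.monotonic()
        unregister_vm(vm_ref)             # VirtualMachine.UnregisterVM
        delete_datastore_file(ds_dir)     # FileManager.DeleteDatastoreFile_Task
        LOG.info("Took %.2f seconds to destroy the instance on the hypervisor.",
                 time.monotonic() - start)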
[ 2528.761130] env[61215]: DEBUG nova.compute.claims [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2528.761307] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2528.761522] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2528.767016] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2528.818497] env[61215]: DEBUG oslo_vmware.rw_handles [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/390c3acf-c3c5-4a67-94c0-cabe58ce4ced/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2528.903841] env[61215]: DEBUG oslo_vmware.rw_handles [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2528.903909] env[61215]: DEBUG oslo_vmware.rw_handles [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/390c3acf-c3c5-4a67-94c0-cabe58ce4ced/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
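[editor's note] The write handle above streams image bytes to the ESX /folder URL with a fixed Content-Length and then closes; as the earlier WARNING tracebacks show, the host sometimes drops the socket without sending a response, so the close path has to tolerate http.client.RemoteDisconnected. A minimal stdlib sketch of that pattern, assuming an illustrative chunk size and caller-supplied URL; this mirrors the behaviour logged here, not the oslo.vmware implementation itself:

    import http.client
    import ssl

    CHUNK = 64 * 1024

    def upload_file(host, url_path, src, size, headers=None):
        """PUT `size` bytes from file object `src`, shrugging off a server
        that closes the connection instead of answering (as in the log)."""
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE     # lab setup; do not do this in production
        conn = http.client.HTTPSConnection(host, context=ctx)
        conn.putrequest("PUT", url_path)
        conn.putheader("Content-Length", str(size))
        for k, v in (headers or {}).items():
            conn.putheader(k, v)
        conn.endheaders()
        sent = 0
        while sent < size:
            chunk = src.read(min(CHUNK, size - sent))
            if not chunk:
                break
            conn.send(chunk)
            sent += len(chunk)
        try:
            conn.getresponse()              # what close() attempts in the traceback
        except http.client.RemoteDisconnected:
            pass   # remote end closed without a response; data was already written
        finally:
            conn.close()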
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2529.024544] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54e0261-c807-496f-8184-72c42507b92f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.031700] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a65888b-60a5-4acc-9dcd-a5c7ce54e221 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.060995] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0564751-e8a7-4ab5-bde8-cb0f5e0fa948 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.067783] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a739a3-7e1e-47b7-9dc8-b9e45f09af75 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.080330] env[61215]: DEBUG nova.compute.provider_tree [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2529.088384] env[61215]: DEBUG nova.scheduler.client.report [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2529.102211] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.341s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2529.102709] env[61215]: ERROR nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2529.102709] env[61215]: Faults: ['InvalidArgument'] [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] Traceback (most recent call last): [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: 
f3a3a510-a085-4388-b49d-b4371095b436] self.driver.spawn(context, instance, image_meta, [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] self._fetch_image_if_missing(context, vi) [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] image_cache(vi, tmp_image_ds_loc) [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] vm_util.copy_virtual_disk( [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] session._wait_for_task(vmdk_copy_task) [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] return self.wait_for_task(task_ref) [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] return evt.wait() [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] result = hub.switch() [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] return self.greenlet.switch() [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] self.f(*self.args, **self.kw) [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] raise exceptions.translate_fault(task_info.error) [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] Faults: ['InvalidArgument'] [ 2529.102709] env[61215]: ERROR nova.compute.manager [instance: f3a3a510-a085-4388-b49d-b4371095b436] [ 2529.103919] env[61215]: DEBUG nova.compute.utils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2529.104876] env[61215]: DEBUG nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Build of instance f3a3a510-a085-4388-b49d-b4371095b436 was re-scheduled: A specified parameter was not correct: fileType [ 2529.104876] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2529.105214] env[61215]: DEBUG nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2529.105393] env[61215]: DEBUG nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2529.105567] env[61215]: DEBUG nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2529.105732] env[61215]: DEBUG nova.network.neutron [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2529.455020] env[61215]: DEBUG nova.network.neutron [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2529.465328] env[61215]: INFO nova.compute.manager [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Took 0.36 seconds to deallocate network for instance. [ 2529.560353] env[61215]: INFO nova.scheduler.client.report [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Deleted allocations for instance f3a3a510-a085-4388-b49d-b4371095b436 [ 2529.581096] env[61215]: DEBUG oslo_concurrency.lockutils [None req-52d73528-62df-49f1-93a6-4bbe4e8880aa tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "f3a3a510-a085-4388-b49d-b4371095b436" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 647.467s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2529.581304] env[61215]: DEBUG oslo_concurrency.lockutils [None req-7d45be56-94aa-4b72-9c4b-65288d677f30 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "f3a3a510-a085-4388-b49d-b4371095b436" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 451.893s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2529.581539] env[61215]: DEBUG oslo_concurrency.lockutils [None req-7d45be56-94aa-4b72-9c4b-65288d677f30 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "f3a3a510-a085-4388-b49d-b4371095b436-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2529.581747] env[61215]: DEBUG oslo_concurrency.lockutils [None req-7d45be56-94aa-4b72-9c4b-65288d677f30 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "f3a3a510-a085-4388-b49d-b4371095b436-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2529.581918] env[61215]: 
DEBUG oslo_concurrency.lockutils [None req-7d45be56-94aa-4b72-9c4b-65288d677f30 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "f3a3a510-a085-4388-b49d-b4371095b436-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2529.583880] env[61215]: INFO nova.compute.manager [None req-7d45be56-94aa-4b72-9c4b-65288d677f30 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Terminating instance [ 2529.585650] env[61215]: DEBUG nova.compute.manager [None req-7d45be56-94aa-4b72-9c4b-65288d677f30 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2529.585866] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-7d45be56-94aa-4b72-9c4b-65288d677f30 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2529.586380] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11061df7-d5d5-467f-8337-0fa43afb0c35 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.598727] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab811f59-de2b-4ce1-a37a-11c3260d0f2e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.626531] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-7d45be56-94aa-4b72-9c4b-65288d677f30 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f3a3a510-a085-4388-b49d-b4371095b436 could not be found. [ 2529.627131] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-7d45be56-94aa-4b72-9c4b-65288d677f30 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2529.627131] env[61215]: INFO nova.compute.manager [None req-7d45be56-94aa-4b72-9c4b-65288d677f30 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2529.627254] env[61215]: DEBUG oslo.service.loopingcall [None req-7d45be56-94aa-4b72-9c4b-65288d677f30 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
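[editor's note] Every lockutils record in this stretch carries the two numbers the library tracks: how long the caller waited to acquire and how long it held the lock. A stdlib re-creation of that bookkeeping as a context manager; the names and print format below are mine, chosen to echo the log lines, not oslo.concurrency's API:

    import contextlib
    import threading
    import time

    _locks: dict[str, threading.Lock] = {}

    @contextlib.contextmanager
    def timed_lock(name: str, holder: str):
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{holder}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{holder}" :: held {held:.3f}s')

    with timed_lock("compute_resources", "demo"):
        time.sleep(0.01)   # critical section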
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2529.627403] env[61215]: DEBUG nova.compute.manager [-] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2529.627503] env[61215]: DEBUG nova.network.neutron [-] [instance: f3a3a510-a085-4388-b49d-b4371095b436] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2529.650644] env[61215]: DEBUG nova.network.neutron [-] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2529.654170] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2529.658323] env[61215]: INFO nova.compute.manager [-] [instance: f3a3a510-a085-4388-b49d-b4371095b436] Took 0.03 seconds to deallocate network for instance. [ 2529.663398] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2529.663602] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2529.663764] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2529.663913] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2529.664934] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ccbb6f-261c-4c5b-bc3e-9928ea5be203 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.674246] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a1c141-a520-47dc-a029-ec66ad045ffa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.690732] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b677771-7b3a-4f3a-803a-1105fc2ead7d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.697696] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-51983680-138c-4516-b8da-7a2f9055f8d3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.732450] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181323MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2529.732570] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2529.732752] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2529.752758] env[61215]: DEBUG oslo_concurrency.lockutils [None req-7d45be56-94aa-4b72-9c4b-65288d677f30 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "f3a3a510-a085-4388-b49d-b4371095b436" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.171s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2529.753539] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "f3a3a510-a085-4388-b49d-b4371095b436" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 87.758s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2529.753727] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: f3a3a510-a085-4388-b49d-b4371095b436] During sync_power_state the instance has a pending task (deleting). Skip. [ 2529.753900] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "f3a3a510-a085-4388-b49d-b4371095b436" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2529.793665] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2529.793868] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb5fb791-5f62-4717-8d8f-7d56ffda15be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
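[editor's note] The inventory dict repeated in these records fixes the capacity arithmetic: per resource class, schedulable capacity is (total - reserved) * allocation_ratio, which is how 48 physical VCPUs at a 4.0 ratio comfortably back the "total allocated vcpus: 9" figure in the resource view. Worked directly from the values in the log (min_unit/max_unit/step_size omitted for brevity):

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        usable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {usable:g} schedulable units")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400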
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2529.794048] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 455e7272-f099-496f-b929-ed6fa9a0ab44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2529.794214] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 9eacbeb5-b918-4b0f-82f4-d06a037803df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2529.794366] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49ab8e42-2da3-474b-b283-9d31b089fd76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2529.794519] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 799c902d-2bc1-4738-b3af-772a5feea819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2529.794699] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02265af9-44e6-4341-ba30-be7caad7da8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2529.794812] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 81e63102-75dc-4f4b-9b48-a63b2a9123f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2529.794937] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e13fe4b8-f445-46f6-a896-8db6fd85fa71 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2529.795130] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2529.795272] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2529.901670] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61eacf91-92b9-491c-a78e-c12ae6b49523 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.909603] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfee06e5-a09d-4e92-8708-0016d0b322fd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.939272] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4537de-e8de-4b43-a843-8e96c8d273cb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.945907] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9673fb-e88f-4b08-8329-99d4fb282756 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2529.958763] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2529.968100] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2529.981526] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2529.981747] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.249s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2530.982051] env[61215]: DEBUG oslo_service.periodic_task [None 
req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2530.982438] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2530.982438] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2531.004554] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2531.004717] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2531.004848] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2531.004978] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2531.005119] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2531.005245] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2531.005366] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2531.005488] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2531.005605] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Skipping network cache update for instance because it is Building. 
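[editor's note] The _heal_instance_info_cache pass above walks every instance on the host and skips any that is still building, since its network info is not yet stable; only the remainder would get a cache refresh. A hedged sketch of that filter, assuming a plain dict per instance rather than Nova's objects:

    BUILDING = "building"

    def heal_instance_info_cache(instances, refresh_cache):
        """Mirror of the log's flow: skip Building instances, refresh the
        rest, and note when nothing was eligible."""
        healed = 0
        for inst in instances:
            if inst["vm_state"] == BUILDING:
                print(f"[instance: {inst['uuid']}] Skipping network cache "
                      "update for instance because it is Building.")
                continue
            refresh_cache(inst)
            healed += 1
        if not healed:
            print("Didn't find any instances for network info cache update.")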
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2531.005726] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2532.655024] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2534.651663] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2576.441768] env[61215]: WARNING oslo_vmware.rw_handles [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2576.441768] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2576.441768] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2576.441768] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2576.441768] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2576.441768] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2576.441768] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2576.441768] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2576.441768] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2576.441768] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2576.441768] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2576.441768] env[61215]: ERROR oslo_vmware.rw_handles [ 2576.442544] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/390c3acf-c3c5-4a67-94c0-cabe58ce4ced/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2576.444654] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2576.444927] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Copying Virtual 
[ 2576.444927] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Copying Virtual Disk [datastore1] vmware_temp/390c3acf-c3c5-4a67-94c0-cabe58ce4ced/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/390c3acf-c3c5-4a67-94c0-cabe58ce4ced/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 2576.445221] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a183cb6-f112-4815-8980-9d3d2ff37cfc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2576.457016] env[61215]: DEBUG oslo_vmware.api [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Waiting for the task: (returnval){
[ 2576.457016] env[61215]: value = "task-1690450"
[ 2576.457016] env[61215]: _type = "Task"
[ 2576.457016] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2576.462827] env[61215]: DEBUG oslo_vmware.api [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Task: {'id': task-1690450, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2576.965388] env[61215]: DEBUG oslo_vmware.exceptions [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 2576.965654] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2576.966254] env[61215]: ERROR nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2576.966254] env[61215]: Faults: ['InvalidArgument']
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Traceback (most recent call last):
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     yield resources
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     self.driver.spawn(context, instance, image_meta,
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     self._fetch_image_if_missing(context, vi)
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     image_cache(vi, tmp_image_ds_loc)
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     vm_util.copy_virtual_disk(
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     session._wait_for_task(vmdk_copy_task)
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     return self.wait_for_task(task_ref)
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     return evt.wait()
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     result = hub.switch()
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     return self.greenlet.switch()
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     self.f(*self.args, **self.kw)
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     raise exceptions.translate_fault(task_info.error)
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Faults: ['InvalidArgument']
[ 2576.966254] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]
[ 2576.967516] env[61215]: INFO nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Terminating instance
[ 2576.968169] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2576.968388] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 2576.968620] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5bae796c-0636-4ff5-b1aa-f60eb9ce0909 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2576.970752] env[61215]: DEBUG nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}}
[ 2576.970952] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2576.971697] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d3956e-15ab-4e37-933a-188ae4f76488 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2576.978279] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 2576.978488] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c2cc2da-fe7f-4510-884d-b609d1e8435c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2576.980542] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 2576.980715] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 2576.981693] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae7bf46c-5310-4225-9e00-b55d6198d720 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2576.986138] env[61215]: DEBUG oslo_vmware.api [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Waiting for the task: (returnval){
[ 2576.986138] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5233908a-3c19-0565-1828-e38c89d92986"
[ 2576.986138] env[61215]: _type = "Task"
[ 2576.986138] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2576.993047] env[61215]: DEBUG oslo_vmware.api [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5233908a-3c19-0565-1828-e38c89d92986, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2577.052403] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 2577.052674] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 2577.052873] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Deleting the datastore file [datastore1] 17d70ed3-4a82-48c8-95ad-c81fb0772e42 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 2577.053149] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2411a075-76ed-4eb5-8c6a-28b96f03bf98 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2577.059114] env[61215]: DEBUG oslo_vmware.api [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Waiting for the task: (returnval){
[ 2577.059114] env[61215]: value = "task-1690452"
[ 2577.059114] env[61215]: _type = "Task"
[ 2577.059114] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2577.066570] env[61215]: DEBUG oslo_vmware.api [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Task: {'id': task-1690452, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2577.496237] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2577.496636] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Creating directory with path [datastore1] vmware_temp/15a049db-65c4-4ed7-8f62-0fd2c0f6480d/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2577.496867] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac4155a7-0ada-443a-aa01-743ffede164b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.508578] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Created directory with path [datastore1] vmware_temp/15a049db-65c4-4ed7-8f62-0fd2c0f6480d/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2577.508771] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Fetch image to [datastore1] vmware_temp/15a049db-65c4-4ed7-8f62-0fd2c0f6480d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2577.508968] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/15a049db-65c4-4ed7-8f62-0fd2c0f6480d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2577.509789] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b396fd8-f056-4379-b3dc-e65678bfaed0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.516273] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c9b495-3ca6-4cfa-abfa-9c69ecd2b8a4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.525199] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53242cb3-4e5b-463c-b305-45976aaad1a0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.556464] env[61215]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee14be6b-7d97-4029-a0ab-f34a11ca81bc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.564789] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-edce51ee-3057-4905-9a56-ca4173c903fa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.569071] env[61215]: DEBUG oslo_vmware.api [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Task: {'id': task-1690452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075291} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2577.569607] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2577.569797] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2577.570029] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2577.570228] env[61215]: INFO nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2577.572348] env[61215]: DEBUG nova.compute.claims [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2577.572514] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2577.572736] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2577.592035] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2577.730273] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad355ae3-1e8f-4151-a746-9858514b0809 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.737872] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe0dfa0-8738-4bbf-8a25-f0e27229beab {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2577.768848] env[61215]: DEBUG oslo_vmware.rw_handles [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15a049db-65c4-4ed7-8f62-0fd2c0f6480d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2577.770699] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5dc8b9-c4f9-4442-b05b-7d213599d8df {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2577.831268] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a411140d-694c-42b7-bc2a-ce03cb5c6e48 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2577.838295] env[61215]: DEBUG oslo_vmware.rw_handles [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 2577.838472] env[61215]: DEBUG oslo_vmware.rw_handles [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15a049db-65c4-4ed7-8f62-0fd2c0f6480d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2577.847239] env[61215]: DEBUG nova.compute.provider_tree [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2577.857610] env[61215]: DEBUG nova.scheduler.client.report [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2577.871146] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.298s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2577.871601] env[61215]: ERROR nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2577.871601] env[61215]: Faults: ['InvalidArgument']
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Traceback (most recent call last):
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     self.driver.spawn(context, instance, image_meta,
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     self._fetch_image_if_missing(context, vi)
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     image_cache(vi, tmp_image_ds_loc)
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     vm_util.copy_virtual_disk(
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     session._wait_for_task(vmdk_copy_task)
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     return self.wait_for_task(task_ref)
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     return evt.wait()
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     result = hub.switch()
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     return self.greenlet.switch()
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     self.f(*self.args, **self.kw)
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]     raise exceptions.translate_fault(task_info.error)
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Faults: ['InvalidArgument']
[ 2577.871601] env[61215]: ERROR nova.compute.manager [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42]
[ 2577.872488] env[61215]: DEBUG nova.compute.utils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2577.873624] env[61215]: DEBUG nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Build of instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 was re-scheduled: A specified parameter was not correct: fileType
[ 2577.873624] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}}
[ 2577.874011] env[61215]: DEBUG nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}}
[ 2577.874196] env[61215]: DEBUG nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged.
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2577.874369] env[61215]: DEBUG nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2577.874535] env[61215]: DEBUG nova.network.neutron [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2578.360333] env[61215]: DEBUG nova.network.neutron [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2578.371316] env[61215]: INFO nova.compute.manager [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Took 0.50 seconds to deallocate network for instance. [ 2578.472875] env[61215]: INFO nova.scheduler.client.report [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Deleted allocations for instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 [ 2578.493960] env[61215]: DEBUG oslo_concurrency.lockutils [None req-faaba43b-942c-4cbc-af2e-e23a2bb061f9 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 685.521s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2578.494234] env[61215]: DEBUG oslo_concurrency.lockutils [None req-59ea7b36-68f6-42da-8d3e-a11abe9b7350 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 488.956s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2578.494454] env[61215]: DEBUG oslo_concurrency.lockutils [None req-59ea7b36-68f6-42da-8d3e-a11abe9b7350 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2578.494663] env[61215]: DEBUG oslo_concurrency.lockutils [None req-59ea7b36-68f6-42da-8d3e-a11abe9b7350 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2578.494833] env[61215]: DEBUG oslo_concurrency.lockutils [None req-59ea7b36-68f6-42da-8d3e-a11abe9b7350 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2578.496808] env[61215]: INFO nova.compute.manager [None req-59ea7b36-68f6-42da-8d3e-a11abe9b7350 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Terminating instance [ 2578.498501] env[61215]: DEBUG nova.compute.manager [None req-59ea7b36-68f6-42da-8d3e-a11abe9b7350 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2578.498697] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-59ea7b36-68f6-42da-8d3e-a11abe9b7350 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2578.499226] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b5f8264-a8fb-4ec8-8d33-1f4213ee9b16 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2578.508166] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfbf166-e575-495e-9357-2e78abce46c9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2578.535335] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-59ea7b36-68f6-42da-8d3e-a11abe9b7350 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 17d70ed3-4a82-48c8-95ad-c81fb0772e42 could not be found. [ 2578.535540] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-59ea7b36-68f6-42da-8d3e-a11abe9b7350 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2578.535720] env[61215]: INFO nova.compute.manager [None req-59ea7b36-68f6-42da-8d3e-a11abe9b7350 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2578.535960] env[61215]: DEBUG oslo.service.loopingcall [None req-59ea7b36-68f6-42da-8d3e-a11abe9b7350 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2578.536198] env[61215]: DEBUG nova.compute.manager [-] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2578.536287] env[61215]: DEBUG nova.network.neutron [-] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2578.565864] env[61215]: DEBUG nova.network.neutron [-] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2578.573883] env[61215]: INFO nova.compute.manager [-] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] Took 0.04 seconds to deallocate network for instance. [ 2578.662285] env[61215]: DEBUG oslo_concurrency.lockutils [None req-59ea7b36-68f6-42da-8d3e-a11abe9b7350 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.168s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2578.664258] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 136.668s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2578.664258] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 17d70ed3-4a82-48c8-95ad-c81fb0772e42] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2578.664258] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "17d70ed3-4a82-48c8-95ad-c81fb0772e42" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2579.669456] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2583.654750] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2587.654589] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2588.654244] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2588.654659] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2589.654613] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2590.655869] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2590.655869] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2590.667792] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2590.668095] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2590.668207] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2590.668375] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2590.669585] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5576bf6e-4fc5-4959-b41e-f0fd869b614f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.679922] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b3ff6d-2ed1-44b8-bcb8-45f922b7ebed {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.693212] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f1cf730-da65-4d82-9b5d-0f080ae71fba {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.699203] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726c630d-ae0f-4d48-9748-476f12b643b3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.726943] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181323MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2590.727104] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2590.727298] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2590.792333] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance fb5fb791-5f62-4717-8d8f-7d56ffda15be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.792497] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 455e7272-f099-496f-b929-ed6fa9a0ab44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.792627] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 9eacbeb5-b918-4b0f-82f4-d06a037803df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.792752] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49ab8e42-2da3-474b-b283-9d31b089fd76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.792872] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 799c902d-2bc1-4738-b3af-772a5feea819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.793017] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02265af9-44e6-4341-ba30-be7caad7da8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.793138] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 81e63102-75dc-4f4b-9b48-a63b2a9123f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.793285] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e13fe4b8-f445-46f6-a896-8db6fd85fa71 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2590.793482] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2590.793625] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2590.891198] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798c1af3-a744-4328-ba52-044c49195e39 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.898819] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b83087-516a-4da3-bfd4-c8a46374c21d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.928672] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796c3a4e-d0b2-42d5-8686-c4c7212e755b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.935133] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe21b0bd-9c61-4a04-ac98-699890ef6212 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2590.947400] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2590.956220] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2590.971276] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2590.971446] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.244s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2591.970756] env[61215]: DEBUG oslo_service.periodic_task [None 
req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2591.971073] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2591.971114] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2591.988843] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2591.989036] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2591.989185] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2591.989316] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2591.989441] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2591.989562] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2591.989682] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2591.989800] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2591.989919] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2592.654609] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2623.805639] env[61215]: WARNING oslo_vmware.rw_handles [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2623.805639] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2623.805639] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2623.805639] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2623.805639] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2623.805639] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2623.805639] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2623.805639] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2623.805639] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2623.805639] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2623.805639] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2623.805639] env[61215]: ERROR oslo_vmware.rw_handles [ 2623.806376] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/15a049db-65c4-4ed7-8f62-0fd2c0f6480d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2623.808724] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2623.809081] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Copying Virtual Disk [datastore1] vmware_temp/15a049db-65c4-4ed7-8f62-0fd2c0f6480d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/15a049db-65c4-4ed7-8f62-0fd2c0f6480d/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2623.809365] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task 
with opID=oslo.vmware-3390bd3b-bbf4-407c-be25-2de57e834333 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2623.817371] env[61215]: DEBUG oslo_vmware.api [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Waiting for the task: (returnval){ [ 2623.817371] env[61215]: value = "task-1690453" [ 2623.817371] env[61215]: _type = "Task" [ 2623.817371] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2623.825470] env[61215]: DEBUG oslo_vmware.api [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Task: {'id': task-1690453, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2624.327293] env[61215]: DEBUG oslo_vmware.exceptions [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2624.327581] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2624.328171] env[61215]: ERROR nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2624.328171] env[61215]: Faults: ['InvalidArgument'] [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Traceback (most recent call last): [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] yield resources [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] self.driver.spawn(context, instance, image_meta, [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: 
fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] self._fetch_image_if_missing(context, vi) [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] image_cache(vi, tmp_image_ds_loc) [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] vm_util.copy_virtual_disk( [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] session._wait_for_task(vmdk_copy_task) [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] return self.wait_for_task(task_ref) [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] return evt.wait() [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] result = hub.switch() [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] return self.greenlet.switch() [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] self.f(*self.args, **self.kw) [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] raise exceptions.translate_fault(task_info.error) [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Faults: 
['InvalidArgument'] [ 2624.328171] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] [ 2624.329361] env[61215]: INFO nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Terminating instance [ 2624.330866] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2624.330866] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2624.330866] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0ac2dfa-a4bf-428f-a01b-7822aef5f7a1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.332905] env[61215]: DEBUG nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2624.333118] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2624.333826] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46cabdd-a169-436b-a164-04ebd737a144 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.340275] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2624.340504] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b05f5de7-1d33-481f-a0da-ef18ea0a1fda {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.342622] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2624.342796] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None 
req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2624.343753] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-855cad40-8d0b-4276-b4fa-925ddf0d7afe {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.348633] env[61215]: DEBUG oslo_vmware.api [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Waiting for the task: (returnval){ [ 2624.348633] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52475b14-501f-54ec-306e-e8d9768973fe" [ 2624.348633] env[61215]: _type = "Task" [ 2624.348633] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2624.358419] env[61215]: DEBUG oslo_vmware.api [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52475b14-501f-54ec-306e-e8d9768973fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2624.409150] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2624.409389] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2624.409594] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Deleting the datastore file [datastore1] fb5fb791-5f62-4717-8d8f-7d56ffda15be {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2624.409860] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f000effb-8f97-44de-868c-85b044a217ea {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.416537] env[61215]: DEBUG oslo_vmware.api [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Waiting for the task: (returnval){ [ 2624.416537] env[61215]: value = "task-1690455" [ 2624.416537] env[61215]: _type = "Task" [ 2624.416537] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2624.425710] env[61215]: DEBUG oslo_vmware.api [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Task: {'id': task-1690455, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2624.859744] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2624.860013] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Creating directory with path [datastore1] vmware_temp/448c61eb-f5f8-4cba-b04c-603532e85aa0/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2624.860263] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e77a9dc-8781-4ae6-8503-79ce2bb7f305 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.871600] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Created directory with path [datastore1] vmware_temp/448c61eb-f5f8-4cba-b04c-603532e85aa0/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2624.871791] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Fetch image to [datastore1] vmware_temp/448c61eb-f5f8-4cba-b04c-603532e85aa0/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2624.872046] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/448c61eb-f5f8-4cba-b04c-603532e85aa0/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2624.872683] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c8316e-bceb-4458-ae2c-62aa5eb9287d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.879706] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fc3389-bcdb-48a0-ab3f-5b6ff9345fc0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.888866] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-094f6414-dcb7-47a3-ac99-79f156d8f43f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.922238] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479ba2f6-630e-4f16-8329-fdb0a9cd8c6e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.929600] env[61215]: DEBUG oslo_vmware.api [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Task: {'id': task-1690455, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083675} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2624.931058] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2624.931258] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2624.931439] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2624.931618] env[61215]: INFO nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Took 0.60 seconds to destroy the instance on the hypervisor. 
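The "Task: {'id': task-1690455, ...} completed successfully" sequence above is oslo.vmware's standard task pattern: Nova starts an asynchronous vCenter task (here FileManager.DeleteDatastoreFile_Task), then blocks in wait_for_task, which polls task_info at a fixed interval (the "progress is 0%" records) and raises a translated fault on error, the same path that produced the VimFaultException tracebacks earlier in this log. A minimal sketch of that pattern follows, assuming oslo.vmware is installed; the host, credentials, and dc_ref below are placeholders, not values from this log.

    # Sketch only: drives a vCenter task the way the records above show.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',   # placeholder endpoint/creds
        api_retry_count=3, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    dc_ref = None  # stands in for a real Datacenter moref looked up beforehand

    # Start the asynchronous delete, mirroring DeleteDatastoreFile_Task above.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] fb5fb791-5f62-4717-8d8f-7d56ffda15be',
        datacenter=dc_ref)

    # wait_for_task polls task_info until completion; on error it raises the
    # fault translated by oslo_vmware.exceptions.translate_fault, e.g. the
    # InvalidArgument fault seen in the tracebacks in this log.
    session.wait_for_task(task)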
[ 2624.933366] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-eb9cf29a-c76a-4045-a86e-ddea68261dcf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2624.935196] env[61215]: DEBUG nova.compute.claims [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2624.935371] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2624.935585] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2624.958732] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2625.073630] env[61215]: DEBUG oslo_vmware.rw_handles [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/448c61eb-f5f8-4cba-b04c-603532e85aa0/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2625.077653] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2fc4e5d-2af0-466c-baf3-9a69015b7e8d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2625.139593] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d211ac9e-2fdc-47d3-8be4-0817901e898e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2625.142937] env[61215]: DEBUG oslo_vmware.rw_handles [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Completed reading data from the image iterator. 
{{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2625.143125] env[61215]: DEBUG oslo_vmware.rw_handles [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/448c61eb-f5f8-4cba-b04c-603532e85aa0/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2625.895070] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e34dcf-b51f-4b8e-a2da-98858e6f15a9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2625.902979] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e241b4-74a9-4ef2-9c68-6801cbea5dcf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2625.915656] env[61215]: DEBUG nova.compute.provider_tree [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2625.924510] env[61215]: DEBUG nova.scheduler.client.report [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2625.938291] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.002s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2625.938393] env[61215]: ERROR nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2625.938393] env[61215]: Faults: ['InvalidArgument'] [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Traceback (most recent call last): [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in 
_build_and_run_instance [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] self.driver.spawn(context, instance, image_meta, [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] self._fetch_image_if_missing(context, vi) [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] image_cache(vi, tmp_image_ds_loc) [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] vm_util.copy_virtual_disk( [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] session._wait_for_task(vmdk_copy_task) [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] return self.wait_for_task(task_ref) [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] return evt.wait() [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] result = hub.switch() [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] return self.greenlet.switch() [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] self.f(*self.args, **self.kw) [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: 
fb5fb791-5f62-4717-8d8f-7d56ffda15be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] raise exceptions.translate_fault(task_info.error) [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Faults: ['InvalidArgument'] [ 2625.938393] env[61215]: ERROR nova.compute.manager [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] [ 2625.939588] env[61215]: DEBUG nova.compute.utils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2625.940582] env[61215]: DEBUG nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Build of instance fb5fb791-5f62-4717-8d8f-7d56ffda15be was re-scheduled: A specified parameter was not correct: fileType [ 2625.940582] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2625.940964] env[61215]: DEBUG nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2625.941162] env[61215]: DEBUG nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2625.941339] env[61215]: DEBUG nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2625.941947] env[61215]: DEBUG nova.network.neutron [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2626.317317] env[61215]: DEBUG nova.network.neutron [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2626.326999] env[61215]: INFO nova.compute.manager [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Took 0.39 seconds to deallocate network for instance. [ 2626.421764] env[61215]: INFO nova.scheduler.client.report [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Deleted allocations for instance fb5fb791-5f62-4717-8d8f-7d56ffda15be [ 2626.446037] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87161016-be79-4f46-9aec-d900374f9aed tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 673.054s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2626.446326] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2e5fe318-670c-4983-b735-75381ac4a77f tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 476.648s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2626.446882] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2e5fe318-670c-4983-b735-75381ac4a77f tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Acquiring lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2626.447072] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2e5fe318-670c-4983-b735-75381ac4a77f tempest-ServersNegativeTestMultiTenantJSON-367018863 
tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2626.447354] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2e5fe318-670c-4983-b735-75381ac4a77f tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2626.450035] env[61215]: INFO nova.compute.manager [None req-2e5fe318-670c-4983-b735-75381ac4a77f tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Terminating instance [ 2626.451952] env[61215]: DEBUG nova.compute.manager [None req-2e5fe318-670c-4983-b735-75381ac4a77f tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2626.452279] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2e5fe318-670c-4983-b735-75381ac4a77f tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2626.452541] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4b5285f-8f27-4181-96f7-dafecb281f79 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2626.462160] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a0a52f-1ac1-41df-b5e0-f2c5ac1480ed {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2626.488543] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-2e5fe318-670c-4983-b735-75381ac4a77f tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fb5fb791-5f62-4717-8d8f-7d56ffda15be could not be found. [ 2626.488759] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2e5fe318-670c-4983-b735-75381ac4a77f tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2626.488940] env[61215]: INFO nova.compute.manager [None req-2e5fe318-670c-4983-b735-75381ac4a77f tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Took 0.04 seconds to destroy the instance on the hypervisor. 
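The Acquiring / acquired / "released" lock triplets that recur through this section (locks "compute_resources", "fb5fb791-...", "fb5fb791-...-events") are emitted by oslo.concurrency's lockutils wrapper, which also reports how long each caller waited for and held the lock. A minimal sketch of the same serialization pattern, with hypothetical critical sections:

    from oslo_concurrency import lockutils

    # Decorator form: serializes all callers on one named lock; lockutils
    # logs the waited/held durations seen throughout this log at DEBUG.
    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        pass  # hypothetical critical section

    # Context-manager form, matching the per-instance event lock above:
    with lockutils.lock('fb5fb791-5f62-4717-8d8f-7d56ffda15be-events'):
        pass  # hypothetical critical section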
[ 2626.489213] env[61215]: DEBUG oslo.service.loopingcall [None req-2e5fe318-670c-4983-b735-75381ac4a77f tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2626.489724] env[61215]: DEBUG nova.compute.manager [-] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2626.489835] env[61215]: DEBUG nova.network.neutron [-] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2626.512999] env[61215]: DEBUG nova.network.neutron [-] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2626.521383] env[61215]: INFO nova.compute.manager [-] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] Took 0.03 seconds to deallocate network for instance. [ 2626.614541] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2e5fe318-670c-4983-b735-75381ac4a77f tempest-ServersNegativeTestMultiTenantJSON-367018863 tempest-ServersNegativeTestMultiTenantJSON-367018863-project-member] Lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.168s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2626.615430] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 184.620s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2626.615628] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: fb5fb791-5f62-4717-8d8f-7d56ffda15be] During sync_power_state the instance has a pending task (deleting). Skip. 
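The "pending task (deleting). Skip." record above reflects a guard in the _sync_power_states periodic task: an instance whose task_state is set is left untouched so the sync cannot race an in-flight operation. A simplified, hypothetical rendering of that check (not Nova's exact code):

    import logging

    LOG = logging.getLogger(__name__)

    def query_driver_power_state_and_sync(instance, driver_power_state):
        # Hypothetical guard mirroring the record above: an in-flight task
        # (here 'deleting') means syncing now could fight with it, so skip.
        if instance.task_state is not None:
            LOG.info("During sync_power_state the instance has a pending "
                     "task (%s). Skip.", instance.task_state)
            return
        # ... otherwise reconcile the DB power state with the driver's view.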
[ 2626.615797] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "fb5fb791-5f62-4717-8d8f-7d56ffda15be" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2641.650638] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2643.655476] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2647.654463] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2648.654645] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2648.654937] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2650.655570] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2650.655570] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2651.654567] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2651.666267] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2651.666580] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2651.666678] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2651.666810] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2651.667960] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1c1867-782b-46e5-9df5-acef0be4ad9b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.677010] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d34530-9e1e-4b4e-acf0-424d70e7bf4f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.691237] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71df0d23-8751-4a1f-ab1a-eb3b82eb3837 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.697550] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6f4abb-4e7f-4fd0-b813-b1271fb21da1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.726217] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181319MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2651.726359] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2651.726544] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2651.792685] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 455e7272-f099-496f-b929-ed6fa9a0ab44 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2651.792848] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 9eacbeb5-b918-4b0f-82f4-d06a037803df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2651.792964] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49ab8e42-2da3-474b-b283-9d31b089fd76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2651.793100] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 799c902d-2bc1-4738-b3af-772a5feea819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2651.793229] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02265af9-44e6-4341-ba30-be7caad7da8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2651.793349] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 81e63102-75dc-4f4b-9b48-a63b2a9123f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2651.793464] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e13fe4b8-f445-46f6-a896-8db6fd85fa71 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2651.793637] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2651.793773] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2651.884018] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2feb63b8-52d2-4690-9ef4-c0495b805824 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.890132] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b319194b-b4f1-444c-a264-144373a298e7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.920107] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0622827f-5bd6-4a79-a419-59b4e5df435d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.928573] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7804c7e-06e0-43f7-91c1-84291890a2e7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2651.941040] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2651.948867] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2651.964235] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2651.964416] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.238s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2653.965130] env[61215]: DEBUG oslo_service.periodic_task [None 
req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2653.965495] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2653.965495] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2653.982781] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2653.982955] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2653.983093] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2653.983223] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2653.983348] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2653.983467] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2653.983584] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2653.983701] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
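
The _heal_instance_info_cache pass above skips every instance that is still Building and ends with nothing to refresh. A stdlib-only sketch of that filter, with simplified stand-ins for Nova's instance objects (the class and field names here are illustrative, not Nova's):

    # Illustrative sketch of the "skip while building" filter seen above.
    # The Instance class and vm_state values are simplified stand-ins,
    # not Nova's real objects.
    from dataclasses import dataclass

    @dataclass
    class Instance:
        uuid: str
        vm_state: str  # e.g. 'building', 'active'

    def instances_to_heal(instances):
        """Yield instances whose network info cache is safe to refresh."""
        for inst in instances:
            if inst.vm_state == 'building':
                # Mirrors: "Skipping network cache update for instance
                # because it is Building."
                continue
            yield inst

    instances = [Instance('455e7272-...', 'building'),
                 Instance('9eacbeb5-...', 'building')]
    if not list(instances_to_heal(instances)):
        print("Didn't find any instances for network info cache update.")
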
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2653.984170] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2655.669660] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2667.222484] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "bc18b836-2fdc-4750-8720-b5b5433fec84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2667.222778] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "bc18b836-2fdc-4750-8720-b5b5433fec84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2667.237014] env[61215]: DEBUG nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Starting instance... 
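
The pair of lockutils records above shows build_and_run_instance serializing on a lock named after the instance UUID before the locked build proceeds. A minimal stdlib sketch of that per-UUID locking pattern (Nova's real implementation is oslo_concurrency.lockutils, per the paths in the records; this only shows the shape):

    # Stand-in for the per-instance build lock pattern logged above.
    # Nova uses oslo_concurrency.lockutils; this sketch uses the stdlib.
    import threading
    from collections import defaultdict

    _locks = defaultdict(threading.Lock)

    def locked_do_build_and_run_instance(instance_uuid, build):
        lock = _locks[instance_uuid]
        with lock:  # "acquired by ... _locked_do_build_and_run_instance"
            return build(instance_uuid)

    locked_do_build_and_run_instance(
        'bc18b836-2fdc-4750-8720-b5b5433fec84',
        lambda uuid: print(f'[instance: {uuid}] Starting instance...'))
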
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2667.283586] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2667.283825] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2667.285206] env[61215]: INFO nova.compute.claims [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2667.415504] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492b15bb-27d4-4fb5-a91b-74e2a0731dde {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2667.422944] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fdc0e0-f646-48bd-a947-05e7aa6eab3d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2667.451674] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac07d9e5-66c0-4a4a-8553-0578d8e9278f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2667.458324] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0c8022-6260-4dd5-88a6-511b2fffe37e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2667.472229] env[61215]: DEBUG nova.compute.provider_tree [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2667.482412] env[61215]: DEBUG nova.scheduler.client.report [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2667.495742] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 
tempest-ImagesTestJSON-1438693841-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.212s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2667.496234] env[61215]: DEBUG nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2667.526683] env[61215]: DEBUG nova.compute.utils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2667.527997] env[61215]: DEBUG nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2667.528134] env[61215]: DEBUG nova.network.neutron [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2667.539603] env[61215]: DEBUG nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2667.595112] env[61215]: DEBUG nova.policy [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27ff5932b5b64df087457974b83bba92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '919d66c0b792490694750f6760a90114', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2667.601189] env[61215]: DEBUG nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Start spawning the instance on the hypervisor. 
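
After the claim is released, the manager starts networking asynchronously and keeps building block devices while Neutron allocates the port. A sketch of that overlap using concurrent.futures; both helper functions are hypothetical placeholders, not Nova code:

    # Sketch of "allocate network asynchronously, keep building", as the
    # records above describe. The helpers are hypothetical placeholders.
    from concurrent.futures import ThreadPoolExecutor

    def allocate_for_instance(uuid):
        # stands in for the Neutron port allocation step
        return [{'id': '0629166e-...', 'address': 'fa:16:3e:a8:ae:3e'}]

    def build_block_device_mappings(uuid):
        return ['/dev/sda']

    with ThreadPoolExecutor(max_workers=1) as pool:
        uuid = 'bc18b836-2fdc-4750-8720-b5b5433fec84'
        nw_future = pool.submit(allocate_for_instance, uuid)  # background
        bdms = build_block_device_mappings(uuid)              # meanwhile
        network_info = nw_future.result()  # join before spawning the VM
        print(bdms, network_info)
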
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2667.621302] env[61215]: DEBUG nova.virt.hardware [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2667.622026] env[61215]: DEBUG nova.virt.hardware [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2667.622026] env[61215]: DEBUG nova.virt.hardware [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2667.622026] env[61215]: DEBUG nova.virt.hardware [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2667.622219] env[61215]: DEBUG nova.virt.hardware [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2667.622219] env[61215]: DEBUG nova.virt.hardware [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2667.622404] env[61215]: DEBUG nova.virt.hardware [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2667.622572] env[61215]: DEBUG nova.virt.hardware [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2667.622741] env[61215]: DEBUG nova.virt.hardware [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 
tempest-ImagesTestJSON-1438693841-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2667.622907] env[61215]: DEBUG nova.virt.hardware [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2667.623104] env[61215]: DEBUG nova.virt.hardware [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2667.623991] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7e7707-1048-42d9-8e15-44950d285c6f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2667.631894] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d8cc8a-a194-40a3-8067-5efc7c441bcf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2667.906313] env[61215]: DEBUG nova.network.neutron [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Successfully created port: 0629166e-02fa-45b0-a9d7-b2863abcebd0 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2668.546802] env[61215]: DEBUG nova.compute.manager [req-8789d050-829b-4225-85d5-d980cce6355a req-344e31d0-86c9-4583-ba4a-a075222b65fa service nova] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Received event network-vif-plugged-0629166e-02fa-45b0-a9d7-b2863abcebd0 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2668.547083] env[61215]: DEBUG oslo_concurrency.lockutils [req-8789d050-829b-4225-85d5-d980cce6355a req-344e31d0-86c9-4583-ba4a-a075222b65fa service nova] Acquiring lock "bc18b836-2fdc-4750-8720-b5b5433fec84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2668.547083] env[61215]: DEBUG oslo_concurrency.lockutils [req-8789d050-829b-4225-85d5-d980cce6355a req-344e31d0-86c9-4583-ba4a-a075222b65fa service nova] Lock "bc18b836-2fdc-4750-8720-b5b5433fec84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2668.547318] env[61215]: DEBUG oslo_concurrency.lockutils [req-8789d050-829b-4225-85d5-d980cce6355a req-344e31d0-86c9-4583-ba4a-a075222b65fa service nova] Lock "bc18b836-2fdc-4750-8720-b5b5433fec84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2668.547438] env[61215]: DEBUG nova.compute.manager [req-8789d050-829b-4225-85d5-d980cce6355a req-344e31d0-86c9-4583-ba4a-a075222b65fa service nova] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] No 
waiting events found dispatching network-vif-plugged-0629166e-02fa-45b0-a9d7-b2863abcebd0 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2668.547606] env[61215]: WARNING nova.compute.manager [req-8789d050-829b-4225-85d5-d980cce6355a req-344e31d0-86c9-4583-ba4a-a075222b65fa service nova] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Received unexpected event network-vif-plugged-0629166e-02fa-45b0-a9d7-b2863abcebd0 for instance with vm_state building and task_state spawning. [ 2668.628347] env[61215]: DEBUG nova.network.neutron [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Successfully updated port: 0629166e-02fa-45b0-a9d7-b2863abcebd0 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2668.639213] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "refresh_cache-bc18b836-2fdc-4750-8720-b5b5433fec84" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2668.639365] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquired lock "refresh_cache-bc18b836-2fdc-4750-8720-b5b5433fec84" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2668.639516] env[61215]: DEBUG nova.network.neutron [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2668.679067] env[61215]: DEBUG nova.network.neutron [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Instance cache missing network info. 
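
The cache update below carries the full network_info structure for port 0629166e. Assuming a list shaped like that record, a short sketch that walks it to recover the fixed IPv4 address per VIF (the sample data is trimmed to just the fields the sketch touches):

    # Given a network_info list shaped like the cache update below,
    # pull out the fixed IP addresses. Minimal illustrative data only.
    network_info = [{
        'id': '0629166e-02fa-45b0-a9d7-b2863abcebd0',
        'address': 'fa:16:3e:a8:ae:3e',
        'network': {'subnets': [{
            'cidr': '192.168.128.0/28',
            'ips': [{'address': '192.168.128.6', 'type': 'fixed',
                     'version': 4}],
        }]},
    }]

    def fixed_ips(nw_info):
        for vif in nw_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    if ip['type'] == 'fixed':
                        yield vif['id'], ip['address']

    print(list(fixed_ips(network_info)))
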
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2668.835100] env[61215]: DEBUG nova.network.neutron [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Updating instance_info_cache with network_info: [{"id": "0629166e-02fa-45b0-a9d7-b2863abcebd0", "address": "fa:16:3e:a8:ae:3e", "network": {"id": "33e707ae-ffbe-4208-a3b4-6f45d3a65a85", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2079632580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "919d66c0b792490694750f6760a90114", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0629166e-02", "ovs_interfaceid": "0629166e-02fa-45b0-a9d7-b2863abcebd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2668.849694] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Releasing lock "refresh_cache-bc18b836-2fdc-4750-8720-b5b5433fec84" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2668.850063] env[61215]: DEBUG nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Instance network_info: |[{"id": "0629166e-02fa-45b0-a9d7-b2863abcebd0", "address": "fa:16:3e:a8:ae:3e", "network": {"id": "33e707ae-ffbe-4208-a3b4-6f45d3a65a85", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2079632580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "919d66c0b792490694750f6760a90114", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0629166e-02", "ovs_interfaceid": "0629166e-02fa-45b0-a9d7-b2863abcebd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2668.850469] env[61215]: DEBUG 
nova.virt.vmwareapi.vmops [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:ae:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1c797172-a569-458e-aeb0-3f21e589a740', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0629166e-02fa-45b0-a9d7-b2863abcebd0', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2668.858032] env[61215]: DEBUG oslo.service.loopingcall [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2668.858500] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2668.858732] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5b85939-0629-4532-ac5d-bfb0e2553562 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2668.880339] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2668.880339] env[61215]: value = "task-1690456" [ 2668.880339] env[61215]: _type = "Task" [ 2668.880339] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2668.888041] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690456, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2669.390844] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690456, 'name': CreateVM_Task, 'duration_secs': 0.277506} completed successfully. 
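
The CreateVM_Task records above show the polling pattern: submit the vCenter task, poll its progress, and return once it reports success along with the elapsed duration. A self-contained stand-in for that poll loop, with a fake task object in place of the vSphere API:

    # Stand-alone sketch of the task-polling loop the records show; the
    # fake task completes on the third poll. Not the real vSphere API.
    import time

    class FakeTask:
        def __init__(self):
            self._polls = 0
        def info(self):
            self._polls += 1
            if self._polls < 3:
                return {'state': 'running', 'progress': self._polls * 40}
            return {'state': 'success', 'progress': 100}

    def wait_for_task(task, interval=0.1):
        start = time.monotonic()
        while True:
            info = task.info()
            if info['state'] == 'success':
                return time.monotonic() - start  # like 'duration_secs'
            if info['state'] == 'error':
                raise RuntimeError('task failed')
            print(f"progress is {info['progress']}%")
            time.sleep(interval)

    print(f"completed in {wait_for_task(FakeTask()):.3f}s")
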
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2669.391025] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2669.397646] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2669.397813] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2669.398165] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2669.398406] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4cd8709-7357-4b45-8759-68fe540e0b48 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2669.402643] env[61215]: DEBUG oslo_vmware.api [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for the task: (returnval){ [ 2669.402643] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52aec93a-3791-cb95-435c-67a258c79971" [ 2669.402643] env[61215]: _type = "Task" [ 2669.402643] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2669.409819] env[61215]: DEBUG oslo_vmware.api [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52aec93a-3791-cb95-435c-67a258c79971, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2669.914413] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2669.914800] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2669.914854] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2670.570621] env[61215]: DEBUG nova.compute.manager [req-977416f2-f157-482a-ba12-5da9a236cdff req-b44d93bd-f002-4a02-959e-76de1a3ec01f service nova] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Received event network-changed-0629166e-02fa-45b0-a9d7-b2863abcebd0 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2670.570836] env[61215]: DEBUG nova.compute.manager [req-977416f2-f157-482a-ba12-5da9a236cdff req-b44d93bd-f002-4a02-959e-76de1a3ec01f service nova] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Refreshing instance network info cache due to event network-changed-0629166e-02fa-45b0-a9d7-b2863abcebd0. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2670.571090] env[61215]: DEBUG oslo_concurrency.lockutils [req-977416f2-f157-482a-ba12-5da9a236cdff req-b44d93bd-f002-4a02-959e-76de1a3ec01f service nova] Acquiring lock "refresh_cache-bc18b836-2fdc-4750-8720-b5b5433fec84" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2670.571242] env[61215]: DEBUG oslo_concurrency.lockutils [req-977416f2-f157-482a-ba12-5da9a236cdff req-b44d93bd-f002-4a02-959e-76de1a3ec01f service nova] Acquired lock "refresh_cache-bc18b836-2fdc-4750-8720-b5b5433fec84" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2670.571411] env[61215]: DEBUG nova.network.neutron [req-977416f2-f157-482a-ba12-5da9a236cdff req-b44d93bd-f002-4a02-959e-76de1a3ec01f service nova] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Refreshing network info cache for port 0629166e-02fa-45b0-a9d7-b2863abcebd0 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2670.967604] env[61215]: DEBUG nova.network.neutron [req-977416f2-f157-482a-ba12-5da9a236cdff req-b44d93bd-f002-4a02-959e-76de1a3ec01f service nova] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Updated VIF entry in instance network info cache for port 0629166e-02fa-45b0-a9d7-b2863abcebd0. 
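
Releasing the image-cache lock below leads into the fetch-if-missing step: serialize on the cached vmdk path, reuse it on a hit, fetch and promote on a miss. A stdlib sketch of that cache-if-missing shape, with the local filesystem standing in for the datastore:

    # Shape of the "_fetch_image_if_missing" flow in the records: take a
    # lock for the cached image path, check the cache, fetch only on a
    # miss. The filesystem stands in for [datastore1].
    import os, threading, tempfile

    _cache_lock = threading.Lock()

    def fetch_image_if_missing(cache_dir, image_id, fetch):
        cached = os.path.join(cache_dir, image_id, f'{image_id}.vmdk')
        with _cache_lock:  # like the devstack-image-cache_base/... lock
            if os.path.exists(cached):
                return cached  # cache hit: reuse the prepared vmdk
            os.makedirs(os.path.dirname(cached), exist_ok=True)
            tmp = cached + '.tmp-sparse'
            fetch(tmp)               # download to a temp location
            os.replace(tmp, cached)  # "Caching image": promote atomically
            return cached

    with tempfile.TemporaryDirectory() as d:
        path = fetch_image_if_missing(
            d, 'e91f0c25-9ff9-4937-8440-f47cfb2028bc',
            lambda p: open(p, 'wb').close())
        print(path)
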
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2670.967979] env[61215]: DEBUG nova.network.neutron [req-977416f2-f157-482a-ba12-5da9a236cdff req-b44d93bd-f002-4a02-959e-76de1a3ec01f service nova] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Updating instance_info_cache with network_info: [{"id": "0629166e-02fa-45b0-a9d7-b2863abcebd0", "address": "fa:16:3e:a8:ae:3e", "network": {"id": "33e707ae-ffbe-4208-a3b4-6f45d3a65a85", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2079632580-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "919d66c0b792490694750f6760a90114", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1c797172-a569-458e-aeb0-3f21e589a740", "external-id": "nsx-vlan-transportzone-957", "segmentation_id": 957, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0629166e-02", "ovs_interfaceid": "0629166e-02fa-45b0-a9d7-b2863abcebd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2670.977640] env[61215]: DEBUG oslo_concurrency.lockutils [req-977416f2-f157-482a-ba12-5da9a236cdff req-b44d93bd-f002-4a02-959e-76de1a3ec01f service nova] Releasing lock "refresh_cache-bc18b836-2fdc-4750-8720-b5b5433fec84" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2674.690094] env[61215]: WARNING oslo_vmware.rw_handles [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2674.690094] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2674.690094] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2674.690094] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2674.690094] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2674.690094] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2674.690094] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2674.690094] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2674.690094] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2674.690094] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2674.690094] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2674.690094] env[61215]: ERROR oslo_vmware.rw_handles [ 2674.690094] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-3bdf1450-f94d-402f-8d13-2149071b36dd 
tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/448c61eb-f5f8-4cba-b04c-603532e85aa0/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2674.691994] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2674.692286] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Copying Virtual Disk [datastore1] vmware_temp/448c61eb-f5f8-4cba-b04c-603532e85aa0/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/448c61eb-f5f8-4cba-b04c-603532e85aa0/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2674.692565] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-badb9572-9f02-4cae-9526-42dbc989d980 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2674.700870] env[61215]: DEBUG oslo_vmware.api [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Waiting for the task: (returnval){ [ 2674.700870] env[61215]: value = "task-1690457" [ 2674.700870] env[61215]: _type = "Task" [ 2674.700870] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2674.708182] env[61215]: DEBUG oslo_vmware.api [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Task: {'id': task-1690457, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2675.210792] env[61215]: DEBUG oslo_vmware.exceptions [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Fault InvalidArgument not matched. 
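
"Fault InvalidArgument not matched" above means no registered exception class was found for the fault name, so the library falls back to a generic VimFaultException, which is what the traceback below raises. An illustrative mapping with that fallback (the one registered class here is made up; only the fallback behaviour mirrors the record):

    # Sketch of fault-name -> exception mapping with a generic fallback,
    # echoing the "Fault InvalidArgument not matched." record above.
    class VimFaultException(Exception):
        def __init__(self, faults, message):
            super().__init__(message)
            self.faults = faults

    class FileNotFoundFault(Exception):
        pass

    _FAULT_CLASSES = {'FileNotFound': FileNotFoundFault}  # illustrative

    def translate_fault(fault_name, message):
        cls = _FAULT_CLASSES.get(fault_name)
        if cls is None:
            # fault not matched -> generic wrapper with the fault list
            return VimFaultException([fault_name], message)
        return cls(message)

    exc = translate_fault('InvalidArgument',
                          'A specified parameter was not correct: fileType')
    print(type(exc).__name__, exc.faults)
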
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2675.211040] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2675.211579] env[61215]: ERROR nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2675.211579] env[61215]: Faults: ['InvalidArgument'] [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Traceback (most recent call last): [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] yield resources [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] self.driver.spawn(context, instance, image_meta, [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] self._fetch_image_if_missing(context, vi) [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] image_cache(vi, tmp_image_ds_loc) [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] vm_util.copy_virtual_disk( [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] session._wait_for_task(vmdk_copy_task) [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] return self.wait_for_task(task_ref) [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] return evt.wait() [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] result = hub.switch() [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] return self.greenlet.switch() [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] self.f(*self.args, **self.kw) [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] raise exceptions.translate_fault(task_info.error) [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Faults: ['InvalidArgument'] [ 2675.211579] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] [ 2675.212759] env[61215]: INFO nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Terminating instance [ 2675.213529] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2675.213746] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2675.213989] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2f75414-1a85-4302-953a-f28326fd0013 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.216061] env[61215]: DEBUG nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2675.216262] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2675.216957] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c21cec-164e-4ace-807d-687de3140750 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.223975] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2675.224889] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8ef42c3-0475-4c1c-9580-a764fafaf7f7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.226207] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2675.226381] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2675.227035] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b49857e7-d739-4a6f-b02d-91d8494fc8dd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.232276] env[61215]: DEBUG oslo_vmware.api [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Waiting for the task: (returnval){ [ 2675.232276] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d14b03-1ee5-cf3d-b5b9-02de9190635e" [ 2675.232276] env[61215]: _type = "Task" [ 2675.232276] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2675.239359] env[61215]: DEBUG oslo_vmware.api [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d14b03-1ee5-cf3d-b5b9-02de9190635e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2675.300970] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2675.301193] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2675.301382] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Deleting the datastore file [datastore1] 455e7272-f099-496f-b929-ed6fa9a0ab44 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2675.301638] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a0fb7a0-357d-4332-836b-30c0bfc96324 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.307854] env[61215]: DEBUG oslo_vmware.api [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Waiting for the task: (returnval){ [ 2675.307854] env[61215]: value = "task-1690459" [ 2675.307854] env[61215]: _type = "Task" [ 2675.307854] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2675.315506] env[61215]: DEBUG oslo_vmware.api [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Task: {'id': task-1690459, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2675.743059] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2675.743059] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Creating directory with path [datastore1] vmware_temp/d4c92368-89e7-4e77-bdcc-939aa0cf8568/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2675.743059] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5630f9a-2ffe-404a-b6c0-198ea8cf09ff {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.754579] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Created directory with path [datastore1] vmware_temp/d4c92368-89e7-4e77-bdcc-939aa0cf8568/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2675.754770] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Fetch image to [datastore1] vmware_temp/d4c92368-89e7-4e77-bdcc-939aa0cf8568/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2675.754945] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/d4c92368-89e7-4e77-bdcc-939aa0cf8568/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2675.755676] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08eb690-f5f8-47c8-9a5b-6d089d3f7a8e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.761837] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00d48bf-a122-4ff8-9dea-66bcb401a67b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.770585] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acd3faf-fe80-44b9-944d-ed575f065387 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.800732] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb98fb1d-7b4d-406f-899f-1e79e5dde764 {{(pid=61215) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.806610] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-da42d7d0-a02d-4bcb-aa63-6e63b170ffb4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.815426] env[61215]: DEBUG oslo_vmware.api [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Task: {'id': task-1690459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080237} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2675.815665] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2675.815855] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2675.816053] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2675.816272] env[61215]: INFO nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Took 0.60 seconds to destroy the instance on the hypervisor. 
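
The destroy sequence just logged runs in a fixed order: unregister the VM, delete its datastore contents, then report the elapsed time. A plain-Python stand-in for that ordering; the two callbacks are hypothetical and nothing here talks to vCenter:

    # Teardown ordering from the records: unregister, delete files,
    # report duration. Callbacks are no-op placeholders.
    import time

    def destroy_instance(uuid, unregister, delete_files):
        start = time.monotonic()
        unregister(uuid)    # "Unregistered the VM"
        delete_files(uuid)  # "Deleted contents of the VM from datastore"
        elapsed = time.monotonic() - start
        print(f'Took {elapsed:.2f} seconds to destroy the instance '
              f'on the hypervisor.')

    destroy_instance('455e7272-f099-496f-b929-ed6fa9a0ab44',
                     lambda u: None, lambda u: None)
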
[ 2675.818535] env[61215]: DEBUG nova.compute.claims [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2675.818738] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2675.819039] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2675.831044] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2675.954951] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2675.954951] env[61215]: ERROR nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. 
[ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Traceback (most recent call last): [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] result = getattr(controller, method)(*args, **kwargs) [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self._get(image_id) [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] resp, body = self.http_client.get(url, headers=header) [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self.request(url, 'GET', **kwargs) [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self._handle_response(resp) [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise exc.from_response(resp, resp.content) [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] During handling of the above exception, another exception occurred: [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Traceback (most recent call last): [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] yield resources [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self.driver.spawn(context, instance, image_meta, [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._fetch_image_if_missing(context, vi) [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] image_fetch(context, vi, tmp_image_ds_loc) [ 2675.954951] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] images.fetch_image( [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] metadata = IMAGE_API.get(context, image_ref) [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return session.show(context, image_id, [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] _reraise_translated_image_exception(image_id) [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise new_exc.with_traceback(exc_trace) [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] result = getattr(controller, method)(*args, **kwargs) [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self._get(image_id) [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] resp, body = self.http_client.get(url, headers=header) [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self.request(url, 'GET', **kwargs) [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self._handle_response(resp) [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise exc.from_response(resp, resp.content) [ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] nova.exception.ImageNotAuthorized: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. 
[ 2675.956137] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2675.956137] env[61215]: INFO nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Terminating instance [ 2675.957324] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2675.957494] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2675.960374] env[61215]: DEBUG nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2675.960578] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2675.960841] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31d2f709-c13e-46d8-abc5-2c8c1b955621 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.963290] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c78c2f9-77b3-471b-b7b9-84ab0c2dfb15 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.970581] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2675.970816] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f15818d-821d-4a2e-a6cd-f047462afc6e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.973014] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2675.973208] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 
tempest-DeleteServersTestJSON-1921371305-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2675.974184] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc095174-52b3-4cbb-9556-4b7d238db669 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.981262] env[61215]: DEBUG oslo_vmware.api [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 2675.981262] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b2f136-a52d-c011-462b-cfbc6e30f2f2" [ 2675.981262] env[61215]: _type = "Task" [ 2675.981262] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2675.988505] env[61215]: DEBUG oslo_vmware.api [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b2f136-a52d-c011-462b-cfbc6e30f2f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2675.989952] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9293177-db81-464b-94cd-bfda11d3f80e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2675.995903] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d7c7a0-657e-4c8a-876f-d6e9a6d3caba {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.029035] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b073c9-b1d9-47ba-b8b6-8df9ffcff2df {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.033317] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b1041e-5e75-4d54-97b8-a8fef31cdbb5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.046484] env[61215]: DEBUG nova.compute.provider_tree [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2676.056025] env[61215]: DEBUG nova.scheduler.client.report [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2676.072100] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.253s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2676.072689] env[61215]: ERROR nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2676.072689] env[61215]: Faults: ['InvalidArgument'] [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Traceback (most recent call last): [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] self.driver.spawn(context, instance, image_meta, [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] self._fetch_image_if_missing(context, vi) [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] image_cache(vi, tmp_image_ds_loc) [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] vm_util.copy_virtual_disk( [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] session._wait_for_task(vmdk_copy_task) [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] return self.wait_for_task(task_ref) [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] return evt.wait() [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] result = hub.switch() [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] return self.greenlet.switch() [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] self.f(*self.args, **self.kw) [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] raise exceptions.translate_fault(task_info.error) [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Faults: ['InvalidArgument'] [ 2676.072689] env[61215]: ERROR nova.compute.manager [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] [ 2676.073799] env[61215]: DEBUG nova.compute.utils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2676.075097] env[61215]: DEBUG nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Build of instance 455e7272-f099-496f-b929-ed6fa9a0ab44 was re-scheduled: A specified parameter was not correct: fileType [ 2676.075097] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2676.075500] env[61215]: DEBUG nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2676.075681] env[61215]: DEBUG nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2676.075857] env[61215]: DEBUG nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2676.076044] env[61215]: DEBUG nova.network.neutron [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2676.081132] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2676.081333] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2676.081514] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Deleting the datastore file [datastore1] 9eacbeb5-b918-4b0f-82f4-d06a037803df {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2676.081762] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3310963f-81fb-47d7-86dc-00509023d9c5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.088708] env[61215]: DEBUG oslo_vmware.api [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Waiting for the task: (returnval){ [ 2676.088708] env[61215]: value = "task-1690461" [ 2676.088708] env[61215]: _type = "Task" [ 2676.088708] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2676.096942] env[61215]: DEBUG oslo_vmware.api [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Task: {'id': task-1690461, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2676.386724] env[61215]: DEBUG nova.network.neutron [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2676.398614] env[61215]: INFO nova.compute.manager [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Took 0.32 seconds to deallocate network for instance. [ 2676.493569] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2676.493569] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating directory with path [datastore1] vmware_temp/55e2c35c-1c65-43a7-a014-c000b2b8a6f7/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2676.493945] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b121961-cfa2-45e2-a9e9-2bfbd4bec418 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.496936] env[61215]: INFO nova.scheduler.client.report [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Deleted allocations for instance 455e7272-f099-496f-b929-ed6fa9a0ab44 [ 2676.513135] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Created directory with path [datastore1] vmware_temp/55e2c35c-1c65-43a7-a014-c000b2b8a6f7/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2676.514016] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Fetch image to [datastore1] vmware_temp/55e2c35c-1c65-43a7-a014-c000b2b8a6f7/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2676.514016] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/55e2c35c-1c65-43a7-a014-c000b2b8a6f7/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2676.514676] env[61215]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2165ad3c-86ca-4981-a348-55783267c9fe {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.521642] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8119201-7408-4fc5-8a8a-f71b6d3cd575 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.525776] env[61215]: DEBUG oslo_concurrency.lockutils [None req-3bdf1450-f94d-402f-8d13-2149071b36dd tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Lock "455e7272-f099-496f-b929-ed6fa9a0ab44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 674.400s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2676.530327] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a6183a1f-6ed6-4aa6-ba60-00f69d20af84 tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Lock "455e7272-f099-496f-b929-ed6fa9a0ab44" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 479.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2676.530555] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a6183a1f-6ed6-4aa6-ba60-00f69d20af84 tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Acquiring lock "455e7272-f099-496f-b929-ed6fa9a0ab44-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2676.530766] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a6183a1f-6ed6-4aa6-ba60-00f69d20af84 tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Lock "455e7272-f099-496f-b929-ed6fa9a0ab44-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2676.530981] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a6183a1f-6ed6-4aa6-ba60-00f69d20af84 tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Lock "455e7272-f099-496f-b929-ed6fa9a0ab44-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2676.532967] env[61215]: INFO nova.compute.manager [None req-a6183a1f-6ed6-4aa6-ba60-00f69d20af84 tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Terminating instance [ 2676.535293] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04de1604-091e-4b11-9c0d-0b5ebd0360a0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.539408] env[61215]: DEBUG nova.compute.manager [None req-a6183a1f-6ed6-4aa6-ba60-00f69d20af84 tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Start destroying the instance on 
the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2676.539595] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a6183a1f-6ed6-4aa6-ba60-00f69d20af84 tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2676.540088] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f64d74d6-8cfe-4e59-9cd5-e4cd5dcb9a5b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.573042] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4411f5-c9ef-4260-85dc-8df0c767d380 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.583433] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed12c05-f676-46bb-bf88-8d0d87138858 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.592031] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d7fae9b8-9351-44b8-ba25-d9a6afecee8b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.601302] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-a6183a1f-6ed6-4aa6-ba60-00f69d20af84 tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 455e7272-f099-496f-b929-ed6fa9a0ab44 could not be found. [ 2676.601493] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a6183a1f-6ed6-4aa6-ba60-00f69d20af84 tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2676.601674] env[61215]: INFO nova.compute.manager [None req-a6183a1f-6ed6-4aa6-ba60-00f69d20af84 tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Took 0.06 seconds to destroy the instance on the hypervisor. [ 2676.601909] env[61215]: DEBUG oslo.service.loopingcall [None req-a6183a1f-6ed6-4aa6-ba60-00f69d20af84 tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2676.603519] env[61215]: DEBUG nova.compute.manager [-] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2676.603635] env[61215]: DEBUG nova.network.neutron [-] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2676.607920] env[61215]: DEBUG oslo_vmware.api [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Task: {'id': task-1690461, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095486} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2676.609971] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2676.610198] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2676.610415] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2676.610604] env[61215]: INFO nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Took 0.65 seconds to destroy the instance on the hypervisor. 
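[editor's note] The ImageNotAuthorized tracebacks in this log have a two-stage shape: glanceclient raises HTTPUnauthorized, and nova/image/glance.py re-raises it as a Nova exception via new_exc.with_traceback(exc_trace), which is what produces the chained "During handling of the above exception, another exception occurred" output. A minimal, self-contained sketch of that re-raise pattern follows; the exception classes and the translation rule here are illustrative stand-ins, not Nova's real ones.

import sys

class ImageNotAuthorized(Exception):
    # stand-in for nova.exception.ImageNotAuthorized (illustrative only)
    pass

class HTTPUnauthorized(Exception):
    # stand-in for glanceclient.exc.HTTPUnauthorized (illustrative only)
    code = 401

def _translate_image_exception(image_id, exc):
    # Nova keeps a fuller mapping table; the 401 case is the one this
    # log exercises.
    if getattr(exc, 'code', None) == 401:
        return ImageNotAuthorized(f'Not authorized for image {image_id}.')
    return exc

def _reraise_translated_image_exception(image_id):
    # Re-raise the in-flight exception as its translated equivalent while
    # keeping the original traceback attached, so both stages show up in
    # the logged stack trace.
    exc_type, exc_value, exc_trace = sys.exc_info()
    new_exc = _translate_image_exception(image_id, exc_value)
    raise new_exc.with_traceback(exc_trace)

try:
    try:
        raise HTTPUnauthorized('HTTP 401 Unauthorized')
    except Exception:
        _reraise_translated_image_exception(
            'e91f0c25-9ff9-4937-8440-f47cfb2028bc')
except ImageNotAuthorized as err:
    print(err)  # -> Not authorized for image e91f0c25-...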
[ 2676.613777] env[61215]: DEBUG nova.compute.claims [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2676.613971] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2676.614233] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2676.633743] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2676.636391] env[61215]: DEBUG nova.network.neutron [-] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2676.655107] env[61215]: INFO nova.compute.manager [-] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] Took 0.05 seconds to deallocate network for instance. [ 2676.695877] env[61215]: DEBUG oslo_vmware.rw_handles [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/55e2c35c-1c65-43a7-a014-c000b2b8a6f7/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2676.757815] env[61215]: DEBUG oslo_vmware.rw_handles [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2676.758294] env[61215]: DEBUG oslo_vmware.rw_handles [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/55e2c35c-1c65-43a7-a014-c000b2b8a6f7/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2676.806404] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a6183a1f-6ed6-4aa6-ba60-00f69d20af84 tempest-ServersTestJSON-743167621 tempest-ServersTestJSON-743167621-project-member] Lock "455e7272-f099-496f-b929-ed6fa9a0ab44" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.276s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2676.808184] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "455e7272-f099-496f-b929-ed6fa9a0ab44" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 234.811s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2676.808184] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 455e7272-f099-496f-b929-ed6fa9a0ab44] During sync_power_state the instance has a pending task (deleting). Skip. [ 2676.808184] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "455e7272-f099-496f-b929-ed6fa9a0ab44" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2676.817398] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18446ffa-346c-4517-a69d-9c52e43d028a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.825421] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c51e68-28f4-4907-8058-fda96a47fb03 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.856313] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29d7662-c8dc-4b48-b224-e10dfeae23e2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.863259] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec88e3d-d924-4eec-aef7-f35a6ede47ef {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2676.878471] env[61215]: DEBUG nova.compute.provider_tree [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2676.887271] env[61215]: DEBUG nova.scheduler.client.report [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2676.900788] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.286s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2676.901562] env[61215]: ERROR nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Traceback (most recent call last): [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] result = getattr(controller, method)(*args, **kwargs) [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self._get(image_id) [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] resp, body = self.http_client.get(url, headers=header) [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self.request(url, 'GET', **kwargs) [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 
9eacbeb5-b918-4b0f-82f4-d06a037803df] return self._handle_response(resp) [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise exc.from_response(resp, resp.content) [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] During handling of the above exception, another exception occurred: [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Traceback (most recent call last): [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self.driver.spawn(context, instance, image_meta, [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._fetch_image_if_missing(context, vi) [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] image_fetch(context, vi, tmp_image_ds_loc) [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] images.fetch_image( [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] metadata = IMAGE_API.get(context, image_ref) [ 2676.901562] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2676.901562] env[61215]: ERROR nova.compute.manager 
[instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return session.show(context, image_id, [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] _reraise_translated_image_exception(image_id) [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise new_exc.with_traceback(exc_trace) [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] result = getattr(controller, method)(*args, **kwargs) [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self._get(image_id) [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] resp, body = self.http_client.get(url, headers=header) [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self.request(url, 'GET', **kwargs) [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self._handle_response(resp) [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise exc.from_response(resp, resp.content) [ 2676.902708] env[61215]: ERROR 
nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] nova.exception.ImageNotAuthorized: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. [ 2676.902708] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2676.902708] env[61215]: DEBUG nova.compute.utils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2676.903913] env[61215]: DEBUG nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Build of instance 9eacbeb5-b918-4b0f-82f4-d06a037803df was re-scheduled: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2676.904384] env[61215]: DEBUG nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2676.904561] env[61215]: DEBUG nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2676.904728] env[61215]: DEBUG nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2676.904898] env[61215]: DEBUG nova.network.neutron [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2677.009105] env[61215]: DEBUG neutronclient.v2_0.client [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61215) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2677.010534] env[61215]: ERROR nova.compute.manager [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
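The glance traceback above shows nova's client-exception translation step: show() in nova/image/glance.py catches the raw glanceclient.exc.HTTPUnauthorized from the image GET and re-raises it as the nova-level ImageNotAuthorized while preserving the original traceback, which is why both exception types appear in the chain. A minimal sketch of that pattern, using stand-in exception classes (nova's real translation table covers more client exceptions than the one shown here):

    import sys

    class HTTPUnauthorized(Exception):
        pass  # stand-in for glanceclient.exc.HTTPUnauthorized

    class ImageNotAuthorized(Exception):
        pass  # stand-in for nova.exception.ImageNotAuthorized

    def _translate_image_exception(image_id, exc_value):
        # Map a client-level auth failure onto the nova-level exception;
        # anything unrecognized passes through unchanged.
        if isinstance(exc_value, HTTPUnauthorized):
            return ImageNotAuthorized("Not authorized for image %s." % image_id)
        return exc_value

    def _reraise_translated_image_exception(image_id):
        # Must run inside an except block: re-raise the in-flight exception
        # under its translated type, keeping the original traceback (the
        # "raise new_exc.with_traceback(exc_trace)" step seen at
        # nova/image/glance.py:1032 in the trace above).
        exc_type, exc_value, exc_trace = sys.exc_info()
        new_exc = _translate_image_exception(image_id, exc_value)
        raise new_exc.with_traceback(exc_trace)

The ERROR entry just above ("Failed to deallocate networks") shows the same expired credentials then breaking network cleanup during the reschedule; its full chained traceback follows.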
[ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Traceback (most recent call last): [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] result = getattr(controller, method)(*args, **kwargs) [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self._get(image_id) [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] resp, body = self.http_client.get(url, headers=header) [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self.request(url, 'GET', **kwargs) [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self._handle_response(resp) [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise exc.from_response(resp, resp.content) [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] During handling of the above exception, another exception occurred: [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Traceback (most recent call last): [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self.driver.spawn(context, instance, image_meta, [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._fetch_image_if_missing(context, vi) [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] image_fetch(context, vi, tmp_image_ds_loc) [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] images.fetch_image( [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] metadata = IMAGE_API.get(context, image_ref) [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 2677.010534] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return session.show(context, image_id, [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] _reraise_translated_image_exception(image_id) [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise new_exc.with_traceback(exc_trace) [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 
9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] result = getattr(controller, method)(*args, **kwargs) [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self._get(image_id) [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return RequestIdProxy(wrapped(*args, **kwargs)) [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] resp, body = self.http_client.get(url, headers=header) [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self.request(url, 'GET', **kwargs) [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self._handle_response(resp) [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise exc.from_response(resp, resp.content) [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] nova.exception.ImageNotAuthorized: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. 
[ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] During handling of the above exception, another exception occurred: [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Traceback (most recent call last): [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 2448, in _do_build_and_run_instance [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._build_and_run_instance(context, instance, image, [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 2740, in _build_and_run_instance [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise exception.RescheduledException( [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] nova.exception.RescheduledException: Build of instance 9eacbeb5-b918-4b0f-82f4-d06a037803df was re-scheduled: Not authorized for image e91f0c25-9ff9-4937-8440-f47cfb2028bc. [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] During handling of the above exception, another exception occurred: [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Traceback (most recent call last): [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] ret = obj(*args, **kwargs) [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] exception_handler_v20(status_code, error_body) [ 2677.011726] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise client_exc(message=error_message, [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Neutron server returns request_ids: ['req-baa94a2a-ae0b-4dad-b8c8-959cfefcdc19'] [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 
9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] During handling of the above exception, another exception occurred: [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Traceback (most recent call last): [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 3037, in _cleanup_allocated_networks [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._deallocate_network(context, instance, requested_networks) [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self.network_api.deallocate_for_instance( [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] data = neutron.list_ports(**search_opts) [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] ret = obj(*args, **kwargs) [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self.list('ports', self.ports_path, retrieve_all, [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] ret = obj(*args, **kwargs) [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] for r in self._pagination(collection, path, **params): [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] res = self.get(path, params=params) [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] ret = obj(*args, **kwargs) [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 
9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self.retry_request("GET", action, body=body, [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] ret = obj(*args, **kwargs) [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self.do_request(method, action, body=body, [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] ret = obj(*args, **kwargs) [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._handle_fault_response(status_code, replybody, resp) [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise exception.Unauthorized() [ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] nova.exception.Unauthorized: Not authorized. 
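That chain ends in nova.exception.Unauthorized because the neutron call ran with the caller's (now invalid) token; further down in this log, the same 401 surfaces through nova's admin client and is translated to NeutronAdminCredentialConfigurationInvalid instead. The two raise sites, nova/network/neutron.py lines 204 and 212 in these traces, sit in the same wrapper. A hedged sketch of that translation as a decorator; the is_admin_client flag is illustrative, since the real wrapper derives the distinction from the client's auth context:

    import functools

    class ClientUnauthorized(Exception):
        pass  # stand-in for neutronclient.common.exceptions.Unauthorized

    class Unauthorized(Exception):
        pass  # stand-in for nova.exception.Unauthorized (bad user token)

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        pass  # stand-in for the nova exception raised when the admin token fails

    def translate_neutron_auth_errors(is_admin_client):
        # Mirrors the wrapper at nova/network/neutron.py:196: a 401 from a
        # user-scoped client means the caller's token is bad, while a 401
        # from the admin client points at the [neutron] credentials in
        # nova.conf.
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                try:
                    return func(*args, **kwargs)
                except ClientUnauthorized:
                    if is_admin_client:
                        raise NeutronAdminCredentialConfigurationInvalid()
                    raise Unauthorized()
            return wrapper
        return decorator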
[ 2677.012975] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2677.076228] env[61215]: INFO nova.scheduler.client.report [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Deleted allocations for instance 9eacbeb5-b918-4b0f-82f4-d06a037803df [ 2677.099037] env[61215]: DEBUG oslo_concurrency.lockutils [None req-87ee5562-c245-4aa6-a0ff-78dae872de81 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "9eacbeb5-b918-4b0f-82f4-d06a037803df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 627.257s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2677.099316] env[61215]: DEBUG oslo_concurrency.lockutils [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "9eacbeb5-b918-4b0f-82f4-d06a037803df" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 431.089s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2677.099534] env[61215]: DEBUG oslo_concurrency.lockutils [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Acquiring lock "9eacbeb5-b918-4b0f-82f4-d06a037803df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2677.099741] env[61215]: DEBUG oslo_concurrency.lockutils [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "9eacbeb5-b918-4b0f-82f4-d06a037803df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2677.099935] env[61215]: DEBUG oslo_concurrency.lockutils [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "9eacbeb5-b918-4b0f-82f4-d06a037803df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2677.101815] env[61215]: INFO nova.compute.manager [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Terminating instance [ 2677.103637] env[61215]: DEBUG nova.compute.manager [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2677.103717] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2677.104195] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-773bd717-398b-414a-a967-e4a1b8693860 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2677.112783] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ebf07b-da78-4973-8a5c-6276bbfc28b2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2677.139920] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9eacbeb5-b918-4b0f-82f4-d06a037803df could not be found. [ 2677.140188] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2677.140498] env[61215]: INFO nova.compute.manager [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2677.140759] env[61215]: DEBUG oslo.service.loopingcall [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2677.140975] env[61215]: DEBUG nova.compute.manager [-] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2677.141088] env[61215]: DEBUG nova.network.neutron [-] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2677.223314] env[61215]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=61215) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2677.223314] env[61215]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-f7c48beb-2bb5-4f17-b1e7-e7acb3da5c3e'] [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2677.223823] env[61215]: ERROR oslo.service.loopingcall [ 2677.225507] env[61215]: ERROR nova.compute.manager [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2677.258063] env[61215]: ERROR nova.compute.manager [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Traceback (most recent call last): [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] ret = obj(*args, **kwargs) [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] exception_handler_v20(status_code, error_body) [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise client_exc(message=error_message, [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Neutron server returns request_ids: ['req-f7c48beb-2bb5-4f17-b1e7-e7acb3da5c3e'] [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] During handling of the above exception, another exception occurred: [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Traceback (most recent call last): [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File 
"/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._delete_instance(context, instance, bdms) [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._shutdown_instance(context, instance, bdms) [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._try_deallocate_network(context, instance, requested_networks) [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] with excutils.save_and_reraise_exception(): [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self.force_reraise() [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise self.value [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] _deallocate_network_with_retries() [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return evt.wait() [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] result = hub.switch() [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self.greenlet.switch() [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] result = 
func(*self.args, **self.kw) [ 2677.258063] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] result = f(*args, **kwargs) [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._deallocate_network( [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self.network_api.deallocate_for_instance( [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] data = neutron.list_ports(**search_opts) [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] ret = obj(*args, **kwargs) [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self.list('ports', self.ports_path, retrieve_all, [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] ret = obj(*args, **kwargs) [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] for r in self._pagination(collection, path, **params): [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] res = self.get(path, params=params) [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] ret = obj(*args, **kwargs) [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self.retry_request("GET", action, body=body, [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] ret = obj(*args, **kwargs) [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] return self.do_request(method, action, body=body, [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] ret = obj(*args, **kwargs) [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] self._handle_fault_response(status_code, replybody, resp) [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2677.259419] env[61215]: ERROR nova.compute.manager [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] [ 2677.284490] env[61215]: DEBUG oslo_concurrency.lockutils [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Lock "9eacbeb5-b918-4b0f-82f4-d06a037803df" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.185s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2677.285522] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "9eacbeb5-b918-4b0f-82f4-d06a037803df" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 235.289s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2677.285713] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] During sync_power_state the instance has a pending task (deleting). Skip. 
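The lockutils DEBUG entries around this point are oslo.concurrency's built-in lock instrumentation: each acquire logs how long the caller waited and each release logs how long the lock was held. They explain the timeline here: the failed build held the per-instance lock for 627.257s, so the terminate request queued for 431.089s and the power-state sync for 235.289s behind it. An illustrative use of the same primitive (lockutils.lock is the real oslo.concurrency context manager; locking on the instance UUID mirrors nova's usage):

    from oslo_concurrency import lockutils

    # Serialize work on a single instance by locking on its UUID. The
    # "acquired ... waited Ns" / "released ... held Ns" DEBUG lines in
    # this log are emitted by lockutils itself around this primitive.
    with lockutils.lock('9eacbeb5-b918-4b0f-82f4-d06a037803df'):
        pass  # build / terminate / sync-power-state work goes here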
[ 2677.285890] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "9eacbeb5-b918-4b0f-82f4-d06a037803df" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2677.327324] env[61215]: INFO nova.compute.manager [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] [instance: 9eacbeb5-b918-4b0f-82f4-d06a037803df] Successfully reverted task state from None on failure for instance. [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server [None req-14b381d9-c1a3-4492-bed8-665e508715f3 tempest-ServersTestMultiNic-1309041055 tempest-ServersTestMultiNic-1309041055-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-f7c48beb-2bb5-4f17-b1e7-e7acb3da5c3e'] [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3345, in terminate_instance [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server return f(*args, 
**kwargs) [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in do_terminate_instance [ 2677.331294] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3333, in do_terminate_instance [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3268, in _delete_instance [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3162, in _shutdown_instance [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3076, in _try_deallocate_network [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server raise self.value [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3074, in _try_deallocate_network [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3063, in _deallocate_network_with_retries [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2283, in _deallocate_network [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2677.333131] env[61215]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2677.334890] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2677.334890] 
env[61215]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2677.334890] env[61215]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2677.334890] env[61215]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2677.334890] env[61215]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2677.334890] env[61215]: ERROR oslo_messaging.rpc.server [ 2680.295252] env[61215]: DEBUG oslo_concurrency.lockutils [None req-725f7bb5-a0d1-4ac9-b25b-a38482772ce2 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "e13fe4b8-f445-46f6-a896-8db6fd85fa71" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2701.668641] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2705.656540] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2708.653689] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2710.655133] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2710.655434] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2710.655533] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2710.655709] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2710.655842] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances with incomplete migration {{(pid=61215) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11258}} [ 2711.663780] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2711.664104] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2711.664223] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Cleaning up deleted instances {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11220}} [ 2711.675549] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] There are 0 instances to clean {{(pid=61215) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2712.666298] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2712.679449] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2712.681136] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2712.681136] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2712.681136] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2712.681949] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070ebbf3-6db0-4abf-a48d-1b6f7d6f5dbd {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2712.691937] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844afa9f-1396-424d-bc7d-2c7aadaaaf15 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2712.705722] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdbe770-d14d-4cf6-a1cb-c278d24aaf2d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2712.711830] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330be927-7fef-4362-a793-ca1be30447d4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2712.741452] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181279MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2712.741616] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2712.741795] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2712.832165] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 49ab8e42-2da3-474b-b283-9d31b089fd76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2712.832339] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 799c902d-2bc1-4738-b3af-772a5feea819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2712.832472] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02265af9-44e6-4341-ba30-be7caad7da8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2712.832596] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 81e63102-75dc-4f4b-9b48-a63b2a9123f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2712.832717] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e13fe4b8-f445-46f6-a896-8db6fd85fa71 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2712.832834] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bc18b836-2fdc-4750-8720-b5b5433fec84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2712.833028] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2712.833176] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2712.848656] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing inventories for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2712.861991] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating ProviderTree inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2712.862202] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2712.872380] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing aggregate associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, aggregates: None {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 
2712.890283] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Refreshing trait associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2712.967874] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3fc8539-7202-4ea2-8296-2af15b97fc96 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2712.975228] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca93bab7-f74f-41b7-8f4f-7894c769da7b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2713.004046] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8e478b-c399-4c2e-a2c1-435978d8aa71 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2713.010721] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332fd6ae-a9e6-4075-adb9-76baa8a6184b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2713.024201] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2713.032572] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2713.046326] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2713.046503] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.305s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2714.034804] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2714.035132] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] 
Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2714.035132] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2714.050429] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2714.050587] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2714.050723] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2714.050851] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2714.050975] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2714.051115] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2714.051237] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2714.653949] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2721.039520] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "8ca1c8ea-ce6a-4945-873b-aa0e6489baf6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2721.039820] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "8ca1c8ea-ce6a-4945-873b-aa0e6489baf6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2721.049752] env[61215]: DEBUG nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2721.097384] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2721.097634] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2721.099269] env[61215]: INFO nova.compute.claims [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2721.219418] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f08dc6-a4b8-4bb1-a484-82036de7edb0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.226625] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-341054bb-cb79-443e-b500-a143d5bfc9b5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.256828] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac7229f-e5f7-4690-b519-d4b83f9fee03 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.263447] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac06e2f-95fa-4937-b450-e77dfafe785f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.276142] env[61215]: DEBUG nova.compute.provider_tree [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2721.285028] env[61215]: DEBUG nova.scheduler.client.report [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2721.298819] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.201s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2721.299281] env[61215]: DEBUG nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2721.329769] env[61215]: DEBUG nova.compute.utils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2721.330950] env[61215]: DEBUG nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2721.331130] env[61215]: DEBUG nova.network.neutron [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2721.339047] env[61215]: DEBUG nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Start building block device mappings for instance. 
{{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2721.394086] env[61215]: DEBUG nova.policy [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6578389615ad46528d49d98bf36b459a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd16229c82ee9494f9921831a13c6bf7e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2721.399177] env[61215]: DEBUG nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2721.422785] env[61215]: DEBUG nova.virt.hardware [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2721.423048] env[61215]: DEBUG nova.virt.hardware [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2721.423220] env[61215]: DEBUG nova.virt.hardware [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2721.423411] env[61215]: DEBUG nova.virt.hardware [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2721.423593] env[61215]: DEBUG nova.virt.hardware [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2721.423758] env[61215]: DEBUG nova.virt.hardware [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 
tempest-ServersTestJSON-1355600418-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2721.423968] env[61215]: DEBUG nova.virt.hardware [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2721.424153] env[61215]: DEBUG nova.virt.hardware [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2721.424332] env[61215]: DEBUG nova.virt.hardware [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2721.424505] env[61215]: DEBUG nova.virt.hardware [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2721.424681] env[61215]: DEBUG nova.virt.hardware [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2721.425548] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ded492a-ef0f-44cb-98df-27c0ba9471d3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.433302] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab605882-770d-4404-9400-5a58079e489c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2721.654510] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2721.710914] env[61215]: DEBUG nova.network.neutron [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Successfully created port: 14bac21c-86e3-48e0-b30e-96feb2bb249e {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2722.263627] env[61215]: DEBUG nova.compute.manager [req-90da0775-bef2-4d3d-9ab6-d2ee5e3933a8 req-3ecebcf8-9100-4f06-8986-9d56e92ce8f6 service nova] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Received event network-vif-plugged-14bac21c-86e3-48e0-b30e-96feb2bb249e {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2722.263892] env[61215]: DEBUG 
oslo_concurrency.lockutils [req-90da0775-bef2-4d3d-9ab6-d2ee5e3933a8 req-3ecebcf8-9100-4f06-8986-9d56e92ce8f6 service nova] Acquiring lock "8ca1c8ea-ce6a-4945-873b-aa0e6489baf6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2722.264082] env[61215]: DEBUG oslo_concurrency.lockutils [req-90da0775-bef2-4d3d-9ab6-d2ee5e3933a8 req-3ecebcf8-9100-4f06-8986-9d56e92ce8f6 service nova] Lock "8ca1c8ea-ce6a-4945-873b-aa0e6489baf6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2722.264261] env[61215]: DEBUG oslo_concurrency.lockutils [req-90da0775-bef2-4d3d-9ab6-d2ee5e3933a8 req-3ecebcf8-9100-4f06-8986-9d56e92ce8f6 service nova] Lock "8ca1c8ea-ce6a-4945-873b-aa0e6489baf6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2722.264433] env[61215]: DEBUG nova.compute.manager [req-90da0775-bef2-4d3d-9ab6-d2ee5e3933a8 req-3ecebcf8-9100-4f06-8986-9d56e92ce8f6 service nova] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] No waiting events found dispatching network-vif-plugged-14bac21c-86e3-48e0-b30e-96feb2bb249e {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2722.264602] env[61215]: WARNING nova.compute.manager [req-90da0775-bef2-4d3d-9ab6-d2ee5e3933a8 req-3ecebcf8-9100-4f06-8986-9d56e92ce8f6 service nova] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Received unexpected event network-vif-plugged-14bac21c-86e3-48e0-b30e-96feb2bb249e for instance with vm_state building and task_state spawning. 
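[editor's note] The nova.virt.hardware records above trace Nova's CPU-topology selection for the m1.nano flavor: with no flavor or image limits ("Flavor limits 0:0:0", "Image limits 0:0:0"), the maximum defaults to 65536 sockets/cores/threads, and for a single vCPU the only viable topology is 1 socket x 1 core x 1 thread. The following is a minimal sketch of that filtering step, not Nova's actual nova/virt/hardware.py code; VirtCPUTopology and possible_topologies mirror names in the log, but the implementation here is illustrative only.

```python
# Illustrative sketch of the topology filtering suggested by the log above.
# Assumption: the driver enumerates sockets/cores/threads whose product
# equals the flavor's vcpus, bounded by the maximum topology; the real
# logic lives in nova/virt/hardware.py (_get_possible_cpu_topologies).
from dataclasses import dataclass

@dataclass(frozen=True)
class VirtCPUTopology:
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus: int, maximum: VirtCPUTopology):
    """Yield topologies with sockets * cores * threads == vcpus."""
    for sockets in range(1, min(vcpus, maximum.sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, maximum.cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= maximum.threads:
                yield VirtCPUTopology(sockets, cores, threads)

# With the defaults seen in the log (maximum 65536:65536:65536) and the
# m1.nano flavor's single vCPU, exactly one topology survives: 1:1:1.
maximum = VirtCPUTopology(sockets=65536, cores=65536, threads=65536)
print(list(possible_topologies(1, maximum)))
# -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```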
[ 2722.348382] env[61215]: DEBUG nova.network.neutron [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Successfully updated port: 14bac21c-86e3-48e0-b30e-96feb2bb249e {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2722.362780] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "refresh_cache-8ca1c8ea-ce6a-4945-873b-aa0e6489baf6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2722.362987] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquired lock "refresh_cache-8ca1c8ea-ce6a-4945-873b-aa0e6489baf6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2722.363177] env[61215]: DEBUG nova.network.neutron [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2722.403596] env[61215]: DEBUG nova.network.neutron [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2722.477568] env[61215]: WARNING oslo_vmware.rw_handles [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2722.477568] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2722.477568] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2722.477568] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2722.477568] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2722.477568] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2722.477568] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2722.477568] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2722.477568] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2722.477568] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2722.477568] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2722.477568] env[61215]: ERROR oslo_vmware.rw_handles [ 2722.478077] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 
49ab8e42-2da3-474b-b283-9d31b089fd76] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/55e2c35c-1c65-43a7-a014-c000b2b8a6f7/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2722.479956] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2722.480223] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Copying Virtual Disk [datastore1] vmware_temp/55e2c35c-1c65-43a7-a014-c000b2b8a6f7/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/55e2c35c-1c65-43a7-a014-c000b2b8a6f7/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2722.480804] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37bf2de3-a185-4985-87a5-64b4794d2dcb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2722.489968] env[61215]: DEBUG oslo_vmware.api [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 2722.489968] env[61215]: value = "task-1690462" [ 2722.489968] env[61215]: _type = "Task" [ 2722.489968] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2722.497813] env[61215]: DEBUG oslo_vmware.api [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': task-1690462, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2722.569866] env[61215]: DEBUG nova.network.neutron [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Updating instance_info_cache with network_info: [{"id": "14bac21c-86e3-48e0-b30e-96feb2bb249e", "address": "fa:16:3e:98:24:f1", "network": {"id": "ca8a320f-bc61-4d27-ae96-1e7235aa0925", "bridge": "br-int", "label": "tempest-ServersTestJSON-1082542131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d16229c82ee9494f9921831a13c6bf7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14bac21c-86", "ovs_interfaceid": "14bac21c-86e3-48e0-b30e-96feb2bb249e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2722.580107] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Releasing lock "refresh_cache-8ca1c8ea-ce6a-4945-873b-aa0e6489baf6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2722.580431] env[61215]: DEBUG nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Instance network_info: |[{"id": "14bac21c-86e3-48e0-b30e-96feb2bb249e", "address": "fa:16:3e:98:24:f1", "network": {"id": "ca8a320f-bc61-4d27-ae96-1e7235aa0925", "bridge": "br-int", "label": "tempest-ServersTestJSON-1082542131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d16229c82ee9494f9921831a13c6bf7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14bac21c-86", "ovs_interfaceid": "14bac21c-86e3-48e0-b30e-96feb2bb249e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 
2722.580832] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:24:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aef08290-001a-4ae8-aff0-1889e2211389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14bac21c-86e3-48e0-b30e-96feb2bb249e', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2722.588524] env[61215]: DEBUG oslo.service.loopingcall [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2722.589010] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2722.589245] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15b1e564-c1e6-41d7-a2ad-f4be0901aabf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2722.611750] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2722.611750] env[61215]: value = "task-1690463" [ 2722.611750] env[61215]: _type = "Task" [ 2722.611750] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2722.619380] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690463, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2723.000933] env[61215]: DEBUG oslo_vmware.exceptions [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Fault InvalidArgument not matched. 
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2723.001267] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2723.001825] env[61215]: ERROR nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2723.001825] env[61215]: Faults: ['InvalidArgument'] [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Traceback (most recent call last): [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] yield resources [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] self.driver.spawn(context, instance, image_meta, [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] self._fetch_image_if_missing(context, vi) [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] image_cache(vi, tmp_image_ds_loc) [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] vm_util.copy_virtual_disk( [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] session._wait_for_task(vmdk_copy_task) [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] return self.wait_for_task(task_ref) [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] return evt.wait() [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] result = hub.switch() [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] return self.greenlet.switch() [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] self.f(*self.args, **self.kw) [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] raise exceptions.translate_fault(task_info.error) [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Faults: ['InvalidArgument'] [ 2723.001825] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] [ 2723.003032] env[61215]: INFO nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Terminating instance [ 2723.003770] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2723.003981] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2723.004603] env[61215]: DEBUG nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 
tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2723.004798] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2723.005052] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69b4fc7b-1baf-48e1-9237-3ac0a41870e3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2723.007378] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7237e3a2-2aa0-442e-a7b0-92969658b634 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2723.014082] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2723.014326] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61d0dde0-a121-4f0e-9f4b-fcdc1274b66a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2723.016576] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2723.016749] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2723.017745] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0d59563-3a47-412b-b56b-6cda8a7f7def {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2723.023124] env[61215]: DEBUG oslo_vmware.api [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for the task: (returnval){ [ 2723.023124] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]526b3b7d-e4a6-d0b9-8642-0a0ddba35745" [ 2723.023124] env[61215]: _type = "Task" [ 2723.023124] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2723.033571] env[61215]: DEBUG oslo_vmware.api [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]526b3b7d-e4a6-d0b9-8642-0a0ddba35745, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2723.121901] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690463, 'name': CreateVM_Task, 'duration_secs': 0.311705} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2723.122101] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2723.122746] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2723.122913] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2723.123250] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2723.123491] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-311c2fd9-7f8e-41d1-aee1-daf27903c6bd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2723.127375] env[61215]: DEBUG oslo_vmware.api [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for the task: (returnval){ [ 2723.127375] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52fd0dcd-1494-c03d-b631-7ffbe99c3ec0" [ 2723.127375] env[61215]: _type = "Task" [ 2723.127375] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2723.134331] env[61215]: DEBUG oslo_vmware.api [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52fd0dcd-1494-c03d-b631-7ffbe99c3ec0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2723.534015] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2723.534406] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Creating directory with path [datastore1] vmware_temp/3154802a-8f5a-4a28-a450-d8cef6ad232b/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2723.534610] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbbbb7d7-bb79-4a16-86aa-a20128318f0f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2723.553370] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Created directory with path [datastore1] vmware_temp/3154802a-8f5a-4a28-a450-d8cef6ad232b/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2723.553598] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Fetch image to [datastore1] vmware_temp/3154802a-8f5a-4a28-a450-d8cef6ad232b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2723.553819] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/3154802a-8f5a-4a28-a450-d8cef6ad232b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2723.554566] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f993cad7-e4d7-4683-9da0-92acc24e1fd2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2723.560872] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f45afd5-1f53-4635-b8a0-158d5be17946 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2723.569622] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41662d05-3bcb-44db-bcc8-4faa85b272a1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2723.600256] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c97cc2e-e34f-4e00-b828-c654f40d92bb {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2723.605542] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f2cc9cbd-b166-42bf-b169-b825a8e17d3f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2723.625327] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2723.636756] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2723.637009] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2723.637230] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2723.675446] env[61215]: DEBUG oslo_vmware.rw_handles [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3154802a-8f5a-4a28-a450-d8cef6ad232b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2723.736529] env[61215]: DEBUG oslo_vmware.rw_handles [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2723.736852] env[61215]: DEBUG oslo_vmware.rw_handles [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3154802a-8f5a-4a28-a450-d8cef6ad232b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2724.292128] env[61215]: DEBUG nova.compute.manager [req-92cd919f-0bad-46e0-b65f-f6d5c53981b8 req-77c71719-11c2-4018-b960-8251866f9c9a service nova] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Received event network-changed-14bac21c-86e3-48e0-b30e-96feb2bb249e {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2724.292181] env[61215]: DEBUG nova.compute.manager [req-92cd919f-0bad-46e0-b65f-f6d5c53981b8 req-77c71719-11c2-4018-b960-8251866f9c9a service nova] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Refreshing instance network info cache due to event network-changed-14bac21c-86e3-48e0-b30e-96feb2bb249e. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2724.292375] env[61215]: DEBUG oslo_concurrency.lockutils [req-92cd919f-0bad-46e0-b65f-f6d5c53981b8 req-77c71719-11c2-4018-b960-8251866f9c9a service nova] Acquiring lock "refresh_cache-8ca1c8ea-ce6a-4945-873b-aa0e6489baf6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2724.292523] env[61215]: DEBUG oslo_concurrency.lockutils [req-92cd919f-0bad-46e0-b65f-f6d5c53981b8 req-77c71719-11c2-4018-b960-8251866f9c9a service nova] Acquired lock "refresh_cache-8ca1c8ea-ce6a-4945-873b-aa0e6489baf6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2724.292695] env[61215]: DEBUG nova.network.neutron [req-92cd919f-0bad-46e0-b65f-f6d5c53981b8 req-77c71719-11c2-4018-b960-8251866f9c9a service nova] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Refreshing network info cache for port 14bac21c-86e3-48e0-b30e-96feb2bb249e {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2724.533357] env[61215]: DEBUG nova.network.neutron [req-92cd919f-0bad-46e0-b65f-f6d5c53981b8 req-77c71719-11c2-4018-b960-8251866f9c9a service nova] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Updated VIF entry in instance network info cache for port 14bac21c-86e3-48e0-b30e-96feb2bb249e. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2724.533703] env[61215]: DEBUG nova.network.neutron [req-92cd919f-0bad-46e0-b65f-f6d5c53981b8 req-77c71719-11c2-4018-b960-8251866f9c9a service nova] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Updating instance_info_cache with network_info: [{"id": "14bac21c-86e3-48e0-b30e-96feb2bb249e", "address": "fa:16:3e:98:24:f1", "network": {"id": "ca8a320f-bc61-4d27-ae96-1e7235aa0925", "bridge": "br-int", "label": "tempest-ServersTestJSON-1082542131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d16229c82ee9494f9921831a13c6bf7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14bac21c-86", "ovs_interfaceid": "14bac21c-86e3-48e0-b30e-96feb2bb249e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2724.542591] env[61215]: DEBUG oslo_concurrency.lockutils [req-92cd919f-0bad-46e0-b65f-f6d5c53981b8 req-77c71719-11c2-4018-b960-8251866f9c9a service nova] Releasing lock "refresh_cache-8ca1c8ea-ce6a-4945-873b-aa0e6489baf6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2724.600601] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2724.600805] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2724.600960] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Deleting the datastore file [datastore1] 49ab8e42-2da3-474b-b283-9d31b089fd76 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2724.601228] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ecfb11ae-eafd-4ac1-9842-897b6d5c23c9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2724.608225] env[61215]: DEBUG oslo_vmware.api [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 
2724.608225] env[61215]: value = "task-1690465" [ 2724.608225] env[61215]: _type = "Task" [ 2724.608225] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2724.615701] env[61215]: DEBUG oslo_vmware.api [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': task-1690465, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2725.117968] env[61215]: DEBUG oslo_vmware.api [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': task-1690465, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.368005} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2725.118285] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2725.118482] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2725.118663] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2725.118838] env[61215]: INFO nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Took 2.11 seconds to destroy the instance on the hypervisor. 
[ 2725.121068] env[61215]: DEBUG nova.compute.claims [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2725.121250] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2725.121472] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2725.308356] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8835c625-6f03-4190-b98f-9d42c9338d08 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2725.316037] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd33e46-5fe3-4b7a-b65d-7a3e03cad5c0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2725.346698] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02aadbd2-93a6-4094-8480-54c6ed3f6793 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2725.353414] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8230719-2217-4957-8c19-92c7733318e2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2725.366440] env[61215]: DEBUG nova.compute.provider_tree [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2725.374489] env[61215]: DEBUG nova.scheduler.client.report [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2725.394726] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 
tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.273s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2725.395255] env[61215]: ERROR nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2725.395255] env[61215]: Faults: ['InvalidArgument'] [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Traceback (most recent call last): [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] self.driver.spawn(context, instance, image_meta, [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] self._fetch_image_if_missing(context, vi) [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] image_cache(vi, tmp_image_ds_loc) [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] vm_util.copy_virtual_disk( [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] session._wait_for_task(vmdk_copy_task) [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] return self.wait_for_task(task_ref) [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] return evt.wait() [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 
49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] result = hub.switch() [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] return self.greenlet.switch() [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] self.f(*self.args, **self.kw) [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] raise exceptions.translate_fault(task_info.error) [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Faults: ['InvalidArgument'] [ 2725.395255] env[61215]: ERROR nova.compute.manager [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] [ 2725.396174] env[61215]: DEBUG nova.compute.utils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2725.397338] env[61215]: DEBUG nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Build of instance 49ab8e42-2da3-474b-b283-9d31b089fd76 was re-scheduled: A specified parameter was not correct: fileType [ 2725.397338] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2725.397705] env[61215]: DEBUG nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2725.397881] env[61215]: DEBUG nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2725.398068] env[61215]: DEBUG nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2725.398273] env[61215]: DEBUG nova.network.neutron [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2725.751994] env[61215]: DEBUG nova.network.neutron [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2725.771711] env[61215]: INFO nova.compute.manager [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Took 0.37 seconds to deallocate network for instance. [ 2726.017959] env[61215]: INFO nova.scheduler.client.report [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Deleted allocations for instance 49ab8e42-2da3-474b-b283-9d31b089fd76 [ 2726.067985] env[61215]: DEBUG oslo_concurrency.lockutils [None req-74b575ea-9b3d-4347-8e27-4aeb96513c83 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "49ab8e42-2da3-474b-b283-9d31b089fd76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 635.351s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2726.068277] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "49ab8e42-2da3-474b-b283-9d31b089fd76" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 439.788s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2726.068530] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "49ab8e42-2da3-474b-b283-9d31b089fd76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2726.068756] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "49ab8e42-2da3-474b-b283-9d31b089fd76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s 
{{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2726.068930] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "49ab8e42-2da3-474b-b283-9d31b089fd76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2726.071783] env[61215]: INFO nova.compute.manager [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Terminating instance [ 2726.072678] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "refresh_cache-49ab8e42-2da3-474b-b283-9d31b089fd76" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2726.072862] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "refresh_cache-49ab8e42-2da3-474b-b283-9d31b089fd76" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2726.073052] env[61215]: DEBUG nova.network.neutron [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2726.106391] env[61215]: DEBUG nova.network.neutron [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2726.340030] env[61215]: DEBUG nova.network.neutron [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2726.349976] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "refresh_cache-49ab8e42-2da3-474b-b283-9d31b089fd76" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2726.351161] env[61215]: DEBUG nova.compute.manager [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2726.351161] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2726.351309] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6790d1d-d758-4e2d-b8b5-53431d3e931d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2726.361074] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7b983b-d86e-416e-bc75-70afc207caae {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2726.388259] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 49ab8e42-2da3-474b-b283-9d31b089fd76 could not be found. [ 2726.388544] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2726.388786] env[61215]: INFO nova.compute.manager [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2726.389055] env[61215]: DEBUG oslo.service.loopingcall [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2726.389284] env[61215]: DEBUG nova.compute.manager [-] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2726.389382] env[61215]: DEBUG nova.network.neutron [-] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2726.406156] env[61215]: DEBUG nova.network.neutron [-] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2726.415975] env[61215]: DEBUG nova.network.neutron [-] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2726.423354] env[61215]: INFO nova.compute.manager [-] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] Took 0.03 seconds to deallocate network for instance. 
[ 2726.566426] env[61215]: DEBUG oslo_concurrency.lockutils [None req-8107a7f0-be75-4c7e-83bb-bcb6cfcbbd20 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "49ab8e42-2da3-474b-b283-9d31b089fd76" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.498s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2726.567629] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "49ab8e42-2da3-474b-b283-9d31b089fd76" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 284.571s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2726.567923] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 49ab8e42-2da3-474b-b283-9d31b089fd76] During sync_power_state the instance has a pending task (deleting). Skip. [ 2726.568308] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "49ab8e42-2da3-474b-b283-9d31b089fd76" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2761.660208] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2766.653690] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2770.655589] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2771.654891] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2771.654891] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2771.654891] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2772.491319] env[61215]: WARNING oslo_vmware.rw_handles [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2772.491319] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2772.491319] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2772.491319] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2772.491319] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2772.491319] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2772.491319] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2772.491319] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2772.491319] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2772.491319] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2772.491319] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2772.491319] env[61215]: ERROR oslo_vmware.rw_handles [ 2772.492143] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/3154802a-8f5a-4a28-a450-d8cef6ad232b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2772.493621] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2772.493871] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Copying Virtual Disk [datastore1] vmware_temp/3154802a-8f5a-4a28-a450-d8cef6ad232b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/3154802a-8f5a-4a28-a450-d8cef6ad232b/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2772.494176] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9a61133-169a-446b-8fc9-f9601eb256f7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2772.502230] env[61215]: DEBUG oslo_vmware.api [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for the task: (returnval){ [ 2772.502230] env[61215]: value = "task-1690466" [ 
2772.502230] env[61215]: _type = "Task" [ 2772.502230] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2772.509793] env[61215]: DEBUG oslo_vmware.api [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': task-1690466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2772.653671] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2772.653914] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2772.666226] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2772.666447] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2772.666617] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2772.666777] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2772.667933] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f366b20f-22f2-434c-95c5-e4d66d8af5ce {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2772.677029] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f853fc-2a37-46de-9669-f7c6f4258e66 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2772.689878] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c523b887-12df-45b3-8554-6af8c82eb9b8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2772.695822] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3ecb40-581f-477a-9f41-c68ec1d41913 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2772.725140] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181309MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2772.725297] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2772.725505] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2772.786524] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 799c902d-2bc1-4738-b3af-772a5feea819 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2772.786685] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 02265af9-44e6-4341-ba30-be7caad7da8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2772.786816] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 81e63102-75dc-4f4b-9b48-a63b2a9123f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2772.786940] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e13fe4b8-f445-46f6-a896-8db6fd85fa71 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2772.787105] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bc18b836-2fdc-4750-8720-b5b5433fec84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2772.787239] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2772.787418] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2772.787557] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2772.860527] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d83978e-a592-48b4-b37b-209d230d4ee1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2772.867869] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5532a63f-55af-466c-a4a6-0988f0c28d87 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2772.896152] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b563f182-bf69-486a-9345-feea6de3c2cc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2772.902889] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb318ff7-5ce8-4abb-9a30-d01a943cd3a3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2772.916267] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2772.923991] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2772.942680] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2772.942861] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.217s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2773.012123] env[61215]: DEBUG oslo_vmware.exceptions [None 
req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2773.012374] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2773.012908] env[61215]: ERROR nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2773.012908] env[61215]: Faults: ['InvalidArgument'] [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Traceback (most recent call last): [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] yield resources [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] self.driver.spawn(context, instance, image_meta, [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] self._fetch_image_if_missing(context, vi) [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] image_cache(vi, tmp_image_ds_loc) [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] vm_util.copy_virtual_disk( [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] session._wait_for_task(vmdk_copy_task) [ 2773.012908] 
env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] return self.wait_for_task(task_ref) [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] return evt.wait() [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] result = hub.switch() [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] return self.greenlet.switch() [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] self.f(*self.args, **self.kw) [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] raise exceptions.translate_fault(task_info.error) [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Faults: ['InvalidArgument'] [ 2773.012908] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] [ 2773.014407] env[61215]: INFO nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Terminating instance [ 2773.014705] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2773.014913] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2773.015213] env[61215]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-dc138d8b-a753-4a70-af7a-be21d4f79bc1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.017554] env[61215]: DEBUG nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2773.017748] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2773.018493] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f7d94c-fcb8-4796-b3a2-41223b10fc15 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.025063] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2773.025280] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5792cf39-1f75-486f-bd08-b48b93bb9356 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.027281] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2773.027460] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2773.028404] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35ebbc4b-7819-4217-bd3c-bd0be3a7ea33 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.033349] env[61215]: DEBUG oslo_vmware.api [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Waiting for the task: (returnval){ [ 2773.033349] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52549ac5-c545-a67f-1bae-9e3dc9b20088" [ 2773.033349] env[61215]: _type = "Task" [ 2773.033349] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2773.040189] env[61215]: DEBUG oslo_vmware.api [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52549ac5-c545-a67f-1bae-9e3dc9b20088, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2773.100154] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2773.100393] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2773.100583] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Deleting the datastore file [datastore1] 799c902d-2bc1-4738-b3af-772a5feea819 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2773.100853] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4779896-214c-4a19-9616-0c8c793c7821 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.113016] env[61215]: DEBUG oslo_vmware.api [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for the task: (returnval){ [ 2773.113016] env[61215]: value = "task-1690468" [ 2773.113016] env[61215]: _type = "Task" [ 2773.113016] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2773.118830] env[61215]: DEBUG oslo_vmware.api [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': task-1690468, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2773.543866] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2773.544279] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Creating directory with path [datastore1] vmware_temp/d0990fd4-2ae6-4d9c-9045-ae02c86acf14/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2773.544411] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff1f12c3-d9a5-4aec-842c-fd5470393d07 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.555814] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Created directory with path [datastore1] vmware_temp/d0990fd4-2ae6-4d9c-9045-ae02c86acf14/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2773.555996] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Fetch image to [datastore1] vmware_temp/d0990fd4-2ae6-4d9c-9045-ae02c86acf14/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2773.556188] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/d0990fd4-2ae6-4d9c-9045-ae02c86acf14/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2773.556907] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a07f186-ad32-47a3-8a17-2478f69224d4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.563372] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936f38d1-6d74-4efb-893a-299d213e666b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.571974] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf45b95-5eef-4c39-bc36-1bd2603d6b7a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.602593] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c17b75-d390-46a0-859f-86b289d013f2 {{(pid=61215) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.607822] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5161e281-1240-4c97-a704-dd6113219130 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.618900] env[61215]: DEBUG oslo_vmware.api [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': task-1690468, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080359} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2773.619127] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2773.619311] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2773.619481] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2773.619651] env[61215]: INFO nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2773.621825] env[61215]: DEBUG nova.compute.claims [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2773.622032] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2773.622265] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2773.627409] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2773.681407] env[61215]: DEBUG oslo_vmware.rw_handles [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d0990fd4-2ae6-4d9c-9045-ae02c86acf14/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2773.742286] env[61215]: DEBUG oslo_vmware.rw_handles [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2773.742470] env[61215]: DEBUG oslo_vmware.rw_handles [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d0990fd4-2ae6-4d9c-9045-ae02c86acf14/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2773.790212] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a91ab3-653e-4428-a170-dff816048445 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.797449] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97875a2e-59db-4cd6-8209-8b2d8cce8262 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.826963] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbe1b21-4fc8-4a29-b6ad-08976bc3002f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.833531] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd35515e-79e7-4a21-9c45-aa9cfff7e47a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2773.846357] env[61215]: DEBUG nova.compute.provider_tree [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2773.855012] env[61215]: DEBUG nova.scheduler.client.report [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2773.868159] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.246s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2773.868713] env[61215]: ERROR nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2773.868713] env[61215]: Faults: ['InvalidArgument'] [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Traceback (most recent call last): [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 
799c902d-2bc1-4738-b3af-772a5feea819] self.driver.spawn(context, instance, image_meta, [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] self._fetch_image_if_missing(context, vi) [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] image_cache(vi, tmp_image_ds_loc) [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] vm_util.copy_virtual_disk( [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] session._wait_for_task(vmdk_copy_task) [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] return self.wait_for_task(task_ref) [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] return evt.wait() [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] result = hub.switch() [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] return self.greenlet.switch() [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] self.f(*self.args, **self.kw) [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] raise exceptions.translate_fault(task_info.error) [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Faults: ['InvalidArgument'] [ 2773.868713] env[61215]: ERROR nova.compute.manager [instance: 799c902d-2bc1-4738-b3af-772a5feea819] [ 2773.869853] env[61215]: DEBUG nova.compute.utils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2773.870742] env[61215]: DEBUG nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Build of instance 799c902d-2bc1-4738-b3af-772a5feea819 was re-scheduled: A specified parameter was not correct: fileType [ 2773.870742] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2773.871157] env[61215]: DEBUG nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2773.871339] env[61215]: DEBUG nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2773.871514] env[61215]: DEBUG nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2773.871680] env[61215]: DEBUG nova.network.neutron [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2774.201687] env[61215]: DEBUG nova.network.neutron [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2774.215076] env[61215]: INFO nova.compute.manager [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Took 0.34 seconds to deallocate network for instance. [ 2774.329505] env[61215]: INFO nova.scheduler.client.report [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Deleted allocations for instance 799c902d-2bc1-4738-b3af-772a5feea819 [ 2774.352840] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9d8d0e44-895b-41a5-b294-be41d206208c tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "799c902d-2bc1-4738-b3af-772a5feea819" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 499.969s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2774.353101] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "799c902d-2bc1-4738-b3af-772a5feea819" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 332.357s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2774.353369] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2774.353562] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "799c902d-2bc1-4738-b3af-772a5feea819" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2774.354052] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1a94f1cd-6256-4f57-9373-a00cfc737d93 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "799c902d-2bc1-4738-b3af-772a5feea819" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 304.221s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2774.354301] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1a94f1cd-6256-4f57-9373-a00cfc737d93 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "799c902d-2bc1-4738-b3af-772a5feea819-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2774.354512] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1a94f1cd-6256-4f57-9373-a00cfc737d93 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "799c902d-2bc1-4738-b3af-772a5feea819-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2774.354681] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1a94f1cd-6256-4f57-9373-a00cfc737d93 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "799c902d-2bc1-4738-b3af-772a5feea819-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2774.356609] env[61215]: INFO nova.compute.manager [None req-1a94f1cd-6256-4f57-9373-a00cfc737d93 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Terminating instance [ 2774.358329] env[61215]: DEBUG nova.compute.manager [None req-1a94f1cd-6256-4f57-9373-a00cfc737d93 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2774.358528] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-1a94f1cd-6256-4f57-9373-a00cfc737d93 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2774.358784] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f8ff367-b80b-46a7-8499-43f568c80f7e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2774.367704] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21eeaba-925f-4ef3-932a-da9cccd88d40 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2774.393569] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-1a94f1cd-6256-4f57-9373-a00cfc737d93 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 799c902d-2bc1-4738-b3af-772a5feea819 could not be found. [ 2774.393776] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-1a94f1cd-6256-4f57-9373-a00cfc737d93 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2774.393959] env[61215]: INFO nova.compute.manager [None req-1a94f1cd-6256-4f57-9373-a00cfc737d93 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2774.394221] env[61215]: DEBUG oslo.service.loopingcall [None req-1a94f1cd-6256-4f57-9373-a00cfc737d93 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2774.394703] env[61215]: DEBUG nova.compute.manager [-] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2774.394804] env[61215]: DEBUG nova.network.neutron [-] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2774.418515] env[61215]: DEBUG nova.network.neutron [-] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2774.426375] env[61215]: INFO nova.compute.manager [-] [instance: 799c902d-2bc1-4738-b3af-772a5feea819] Took 0.03 seconds to deallocate network for instance. 
[ 2774.516489] env[61215]: DEBUG oslo_concurrency.lockutils [None req-1a94f1cd-6256-4f57-9373-a00cfc737d93 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "799c902d-2bc1-4738-b3af-772a5feea819" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.162s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2774.943951] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2774.944344] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2774.944344] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2774.962761] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2774.962951] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2774.963070] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2774.963213] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2774.963340] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2774.963465] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2775.654267] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2780.651613] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2821.524186] env[61215]: WARNING oslo_vmware.rw_handles [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2821.524186] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2821.524186] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2821.524186] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2821.524186] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2821.524186] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2821.524186] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2821.524186] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2821.524186] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2821.524186] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2821.524186] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2821.524186] env[61215]: ERROR oslo_vmware.rw_handles [ 2821.524750] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/d0990fd4-2ae6-4d9c-9045-ae02c86acf14/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2821.526582] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2821.526886] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Copying Virtual Disk [datastore1] vmware_temp/d0990fd4-2ae6-4d9c-9045-ae02c86acf14/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] 
vmware_temp/d0990fd4-2ae6-4d9c-9045-ae02c86acf14/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2821.527204] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f259cf65-168c-492d-9a09-860a53f8e08e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2821.536533] env[61215]: DEBUG oslo_vmware.api [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Waiting for the task: (returnval){ [ 2821.536533] env[61215]: value = "task-1690469" [ 2821.536533] env[61215]: _type = "Task" [ 2821.536533] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2821.544287] env[61215]: DEBUG oslo_vmware.api [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Task: {'id': task-1690469, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2822.047264] env[61215]: DEBUG oslo_vmware.exceptions [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2822.047558] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2822.048119] env[61215]: ERROR nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2822.048119] env[61215]: Faults: ['InvalidArgument'] [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Traceback (most recent call last): [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] yield resources [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] self.driver.spawn(context, instance, image_meta, [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 
02265af9-44e6-4341-ba30-be7caad7da8b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] self._fetch_image_if_missing(context, vi) [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] image_cache(vi, tmp_image_ds_loc) [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] vm_util.copy_virtual_disk( [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] session._wait_for_task(vmdk_copy_task) [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] return self.wait_for_task(task_ref) [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] return evt.wait() [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] result = hub.switch() [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] return self.greenlet.switch() [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] self.f(*self.args, **self.kw) [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] raise exceptions.translate_fault(task_info.error) [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Faults: ['InvalidArgument'] [ 2822.048119] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] [ 2822.048867] env[61215]: INFO nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Terminating instance [ 2822.050027] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2822.050236] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2822.050477] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24c3b610-1af6-4c9a-8954-5e922e17dfe1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.052763] env[61215]: DEBUG nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2822.052958] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2822.053674] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7698d3d-90db-4039-a50d-f38fa3705d62 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.060176] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2822.060378] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ca537e2-f2e8-4c46-a70c-6232c380b5d5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.062401] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2822.062578] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2822.063510] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd3e1905-9f00-4eed-b7ab-bf69f1ff0596 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.069137] env[61215]: DEBUG oslo_vmware.api [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for the task: (returnval){ [ 2822.069137] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f2ccbe-b15d-8295-e51f-0904eb54bda2" [ 2822.069137] env[61215]: _type = "Task" [ 2822.069137] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2822.075823] env[61215]: DEBUG oslo_vmware.api [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f2ccbe-b15d-8295-e51f-0904eb54bda2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2822.137410] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2822.137630] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2822.137816] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Deleting the datastore file [datastore1] 02265af9-44e6-4341-ba30-be7caad7da8b {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2822.138084] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd268b8e-c395-45c0-85dd-f2f2f3683878 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.144129] env[61215]: DEBUG oslo_vmware.api [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Waiting for the task: (returnval){ [ 2822.144129] env[61215]: value = "task-1690471" [ 2822.144129] env[61215]: _type = "Task" [ 2822.144129] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2822.151276] env[61215]: DEBUG oslo_vmware.api [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Task: {'id': task-1690471, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2822.579501] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2822.579830] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Creating directory with path [datastore1] vmware_temp/8a045b91-6186-4d0f-96d7-1c54290be98a/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2822.580018] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e87a7bba-95fd-48b1-91f6-07cd0b0f1e13 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.590998] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Created directory with path [datastore1] vmware_temp/8a045b91-6186-4d0f-96d7-1c54290be98a/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2822.591202] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Fetch image to [datastore1] vmware_temp/8a045b91-6186-4d0f-96d7-1c54290be98a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2822.591378] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/8a045b91-6186-4d0f-96d7-1c54290be98a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2822.592114] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efdb7bf4-1755-47d2-acdf-d56d0ecd7b37 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.598289] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b1516a-49b7-4c1c-b2aa-ddf2b711b382 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.606991] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cd5bf3-5ce1-459a-ba23-18986588274e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.636573] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8072d1f4-c53e-4d2a-b93d-1518d7241c9e {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.642057] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a4de9224-5fc8-4b21-9116-2c7cd3761cf6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.652760] env[61215]: DEBUG oslo_vmware.api [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Task: {'id': task-1690471, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07585} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2822.652978] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2822.653170] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2822.653346] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2822.653514] env[61215]: INFO nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Took 0.60 seconds to destroy the instance on the hypervisor. 
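The DeleteDatastoreFile_Task sequence above -- invoke, repeated "progress is 0%" polls, then "completed successfully ... duration_secs: 0.07585" -- is oslo.vmware's standard task-wait cycle. A minimal sketch of driving that cycle through the public oslo_vmware.api interface, assuming an already-established VMwareAPISession named `session`; the function name and the optional datacenter argument are illustrative:

    from oslo_vmware import exceptions as vexc

    def delete_datastore_file(session, ds_path, datacenter=None):
        file_manager = session.vim.service_content.fileManager
        # Starts the server-side task logged above as
        # "Invoking FileManager.DeleteDatastoreFile_Task".
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=datacenter)
        # wait_for_task() polls TaskInfo until it reaches a terminal state;
        # each poll emits one of the "progress is N%" DEBUG records.
        try:
            return session.wait_for_task(task)
        except vexc.FileNotFoundException:
            pass  # a file that is already gone counts as deleted

When a task ends in error, wait_for_task() raises the translated fault instead, which is exactly how the "A specified parameter was not correct: fileType" VimFaultException earlier in this log surfaced from CopyVirtualDisk_Task.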
[ 2822.655664] env[61215]: DEBUG nova.compute.claims [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2822.655941] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2822.656311] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2822.664220] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2822.666582] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2822.716244] env[61215]: DEBUG oslo_vmware.rw_handles [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8a045b91-6186-4d0f-96d7-1c54290be98a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2822.777208] env[61215]: DEBUG oslo_vmware.rw_handles [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2822.777399] env[61215]: DEBUG oslo_vmware.rw_handles [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8a045b91-6186-4d0f-96d7-1c54290be98a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2822.829341] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4434ea7c-22c1-4d03-b8d2-2ba02d0a1ec2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.836744] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3001dbf-26ae-4363-b2d2-882fd3e908d3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.867269] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9debc2f7-9af3-486f-81f9-2da41b7e8118 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.874314] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9632929-dff3-48fb-a510-934dd42eb872 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2822.887140] env[61215]: DEBUG nova.compute.provider_tree [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2822.895701] env[61215]: DEBUG nova.scheduler.client.report [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2822.912120] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.256s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2822.912686] env[61215]: ERROR nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2822.912686] env[61215]: Faults: ['InvalidArgument'] [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Traceback (most recent call last): [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2822.912686] env[61215]: ERROR 
nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] self.driver.spawn(context, instance, image_meta, [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] self._fetch_image_if_missing(context, vi) [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] image_cache(vi, tmp_image_ds_loc) [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] vm_util.copy_virtual_disk( [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] session._wait_for_task(vmdk_copy_task) [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] return self.wait_for_task(task_ref) [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] return evt.wait() [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] result = hub.switch() [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] return self.greenlet.switch() [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] self.f(*self.args, **self.kw) [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] raise exceptions.translate_fault(task_info.error) [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Faults: ['InvalidArgument'] [ 2822.912686] env[61215]: ERROR nova.compute.manager [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] [ 2822.913446] env[61215]: DEBUG nova.compute.utils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2822.914892] env[61215]: DEBUG nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Build of instance 02265af9-44e6-4341-ba30-be7caad7da8b was re-scheduled: A specified parameter was not correct: fileType [ 2822.914892] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2822.915284] env[61215]: DEBUG nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2822.915462] env[61215]: DEBUG nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2822.915638] env[61215]: DEBUG nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2822.915853] env[61215]: DEBUG nova.network.neutron [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2823.498725] env[61215]: DEBUG nova.network.neutron [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2823.509397] env[61215]: INFO nova.compute.manager [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Took 0.59 seconds to deallocate network for instance. [ 2823.604027] env[61215]: INFO nova.scheduler.client.report [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Deleted allocations for instance 02265af9-44e6-4341-ba30-be7caad7da8b [ 2823.627997] env[61215]: DEBUG oslo_concurrency.lockutils [None req-544fed3d-bd78-4b55-8bb5-0186da9ec8f3 tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "02265af9-44e6-4341-ba30-be7caad7da8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 535.564s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2823.628293] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "02265af9-44e6-4341-ba30-be7caad7da8b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 381.632s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2823.628490] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] During sync_power_state the instance has a pending task (spawning). Skip. 
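Each "Acquiring lock / acquired / released" triplet in the records above is emitted by oslo.concurrency when a function wrapped with its synchronized decorator runs. A minimal sketch of the pattern, with an illustrative body; only the decorator itself is the real lockutils API:

    from oslo_concurrency import lockutils

    # All callers naming the same instance UUID serialize here; entry and
    # exit produce the DEBUG "acquired ... waited Ns" and
    # "released ... held Ns" records seen throughout this log.
    @lockutils.synchronized('02265af9-44e6-4341-ba30-be7caad7da8b')
    def do_terminate_instance():
        pass  # terminate work would run with the lock held

Because the lock is held for the whole build attempt (535.564s above), the queued power-state sync could only run after it was released, having waited 381.632s.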
[ 2823.628672] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "02265af9-44e6-4341-ba30-be7caad7da8b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2823.629175] env[61215]: DEBUG oslo_concurrency.lockutils [None req-43615b5e-2186-4b3e-822c-99e4b93bd56e tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "02265af9-44e6-4341-ba30-be7caad7da8b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 340.285s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2823.629420] env[61215]: DEBUG oslo_concurrency.lockutils [None req-43615b5e-2186-4b3e-822c-99e4b93bd56e tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Acquiring lock "02265af9-44e6-4341-ba30-be7caad7da8b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2823.629639] env[61215]: DEBUG oslo_concurrency.lockutils [None req-43615b5e-2186-4b3e-822c-99e4b93bd56e tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "02265af9-44e6-4341-ba30-be7caad7da8b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2823.629821] env[61215]: DEBUG oslo_concurrency.lockutils [None req-43615b5e-2186-4b3e-822c-99e4b93bd56e tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "02265af9-44e6-4341-ba30-be7caad7da8b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2823.631735] env[61215]: INFO nova.compute.manager [None req-43615b5e-2186-4b3e-822c-99e4b93bd56e tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Terminating instance [ 2823.634706] env[61215]: DEBUG nova.compute.manager [None req-43615b5e-2186-4b3e-822c-99e4b93bd56e tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2823.634706] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-43615b5e-2186-4b3e-822c-99e4b93bd56e tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2823.634706] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-582da4ff-3a34-4f16-b253-b40e2f4d5bde {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2823.644781] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b968fd77-d99b-4799-b956-853e53022097 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2823.669410] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-43615b5e-2186-4b3e-822c-99e4b93bd56e tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 02265af9-44e6-4341-ba30-be7caad7da8b could not be found. [ 2823.669611] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-43615b5e-2186-4b3e-822c-99e4b93bd56e tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2823.669801] env[61215]: INFO nova.compute.manager [None req-43615b5e-2186-4b3e-822c-99e4b93bd56e tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2823.670061] env[61215]: DEBUG oslo.service.loopingcall [None req-43615b5e-2186-4b3e-822c-99e4b93bd56e tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2823.670288] env[61215]: DEBUG nova.compute.manager [-] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2823.670388] env[61215]: DEBUG nova.network.neutron [-] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2823.694360] env[61215]: DEBUG nova.network.neutron [-] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2823.701853] env[61215]: INFO nova.compute.manager [-] [instance: 02265af9-44e6-4341-ba30-be7caad7da8b] Took 0.03 seconds to deallocate network for instance. 
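The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" record above comes from oslo.service's looping-call helper. A minimal sketch of that retry shape, with a stand-in deallocation helper and an assumed interval; the loopingcall calls are the documented oslo_service API:

    from oslo_service import loopingcall

    def deallocate():
        pass  # stand-in for the real network-deallocation call

    def _deallocate_with_retries():
        try:
            deallocate()
        except Exception:
            # Returning lets the loop re-invoke us after `interval` seconds.
            return
        # Success: stop the loop and unblock the .wait() below.
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=30).wait()

Here the first attempt succeeded (the empty instance_info_cache update and "Took 0.03 seconds to deallocate network" records above), so the loop exited on its first pass.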
[ 2823.782535] env[61215]: DEBUG oslo_concurrency.lockutils [None req-43615b5e-2186-4b3e-822c-99e4b93bd56e tempest-AttachVolumeTestJSON-1355660169 tempest-AttachVolumeTestJSON-1355660169-project-member] Lock "02265af9-44e6-4341-ba30-be7caad7da8b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.153s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2827.654252] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2831.654534] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2832.654778] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2832.655159] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2832.655239] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2832.655338] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2832.667213] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2832.667454] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2832.667628] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2832.667790] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2832.668919] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3659971-acac-4254-ba30-dce2d067f179 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2832.678621] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5b3927-3519-413a-8b71-45045d374a52 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2832.693147] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3403524a-b42e-4b92-ab45-265d09b4ff98 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2832.699354] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f262a8b7-6e42-497e-aac1-e6f0df398e6a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2832.729017] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181334MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2832.729214] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2832.729373] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2832.784285] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 81e63102-75dc-4f4b-9b48-a63b2a9123f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2832.784441] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e13fe4b8-f445-46f6-a896-8db6fd85fa71 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2832.784573] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bc18b836-2fdc-4750-8720-b5b5433fec84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2832.784699] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2832.784872] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2832.785025] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2832.842073] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92833bf1-91fa-4fa4-ae0d-02f78f1ce4e2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2832.847741] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29acfd3-1889-4b9b-9898-528993895103 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2832.876405] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8cd6fb1-29c8-4399-b6ba-6a41ebbd654c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2832.883442] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4d4300-c9df-47fb-b0b1-6b53992bd65f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2832.896546] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2832.904297] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2832.920201] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2832.920358] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.191s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2834.919590] env[61215]: DEBUG oslo_service.periodic_task [None 
req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2836.655036] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2836.655036] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2836.655036] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2836.667674] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2836.667836] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2836.667970] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2836.668109] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2836.668233] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2836.668700] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2864.710602] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b0990d90-15d6-40cd-9e3d-71d08a9eddd7 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "bc18b836-2fdc-4750-8720-b5b5433fec84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2869.584970] env[61215]: WARNING oslo_vmware.rw_handles [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2869.584970] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2869.584970] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2869.584970] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2869.584970] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2869.584970] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2869.584970] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2869.584970] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2869.584970] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2869.584970] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2869.584970] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2869.584970] env[61215]: ERROR oslo_vmware.rw_handles [ 2869.584970] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/8a045b91-6186-4d0f-96d7-1c54290be98a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2869.586779] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2869.587077] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Copying Virtual Disk [datastore1] vmware_temp/8a045b91-6186-4d0f-96d7-1c54290be98a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] 
vmware_temp/8a045b91-6186-4d0f-96d7-1c54290be98a/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2869.587395] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47bdbec1-a634-400c-9957-cc46e172487f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2869.596377] env[61215]: DEBUG oslo_vmware.api [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for the task: (returnval){ [ 2869.596377] env[61215]: value = "task-1690472" [ 2869.596377] env[61215]: _type = "Task" [ 2869.596377] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2869.604252] env[61215]: DEBUG oslo_vmware.api [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Task: {'id': task-1690472, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2870.107354] env[61215]: DEBUG oslo_vmware.exceptions [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2870.107653] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2870.108243] env[61215]: ERROR nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2870.108243] env[61215]: Faults: ['InvalidArgument'] [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Traceback (most recent call last): [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] yield resources [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] self.driver.spawn(context, instance, image_meta, [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] 
self._vmops.spawn(context, instance, image_meta, injected_files, [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] self._fetch_image_if_missing(context, vi) [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] image_cache(vi, tmp_image_ds_loc) [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] vm_util.copy_virtual_disk( [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] session._wait_for_task(vmdk_copy_task) [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] return self.wait_for_task(task_ref) [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] return evt.wait() [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] result = hub.switch() [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] return self.greenlet.switch() [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] self.f(*self.args, **self.kw) [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] raise exceptions.translate_fault(task_info.error) [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not 
correct: fileType [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Faults: ['InvalidArgument'] [ 2870.108243] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] [ 2870.109329] env[61215]: INFO nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Terminating instance [ 2870.110184] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2870.110402] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2870.110644] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d9743a9-6230-4c61-82a7-f144cb40a64b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2870.112905] env[61215]: DEBUG nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2870.113127] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2870.113859] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e51dce-0c54-4b60-a3b9-480277ab5f47 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2870.121142] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2870.121355] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a17ef886-66d0-4520-b68a-999abc2f2f67 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2870.123383] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2870.123562] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2870.124538] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2091db3b-5883-4149-a82b-a8d1c791337c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2870.128935] env[61215]: DEBUG oslo_vmware.api [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 2870.128935] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ccc258-0285-7e2e-a70b-5c31a725492f" [ 2870.128935] env[61215]: _type = "Task" [ 2870.128935] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2870.136146] env[61215]: DEBUG oslo_vmware.api [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ccc258-0285-7e2e-a70b-5c31a725492f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2870.200221] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2870.200447] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2870.200635] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Deleting the datastore file [datastore1] 81e63102-75dc-4f4b-9b48-a63b2a9123f2 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2870.200899] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e997400-d087-47b3-87da-6313fe2ada6b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2870.207788] env[61215]: DEBUG oslo_vmware.api [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for the task: (returnval){ [ 2870.207788] env[61215]: value = "task-1690474" [ 2870.207788] env[61215]: _type = "Task" [ 2870.207788] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2870.215363] env[61215]: DEBUG oslo_vmware.api [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Task: {'id': task-1690474, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2870.639573] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2870.639931] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating directory with path [datastore1] vmware_temp/e3a6b22e-ff1d-4a2c-8f74-53ec5bb0d522/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2870.640116] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fed39a63-014b-4165-9cd1-3fc2d59f430e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2870.652811] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Created directory with path [datastore1] vmware_temp/e3a6b22e-ff1d-4a2c-8f74-53ec5bb0d522/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2870.652994] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Fetch image to [datastore1] vmware_temp/e3a6b22e-ff1d-4a2c-8f74-53ec5bb0d522/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2870.653183] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/e3a6b22e-ff1d-4a2c-8f74-53ec5bb0d522/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2870.653885] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01702602-8ff9-4452-8cb1-f756b13af52a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2870.660289] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26289b2e-b1e2-485d-b3cd-6e95d3bdf694 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2870.669109] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8270a2-0b5e-42af-8136-87cba916739e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2870.698936] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deebf4d1-b0e1-4284-a156-530de6baa4ad 
{{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2870.704359] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1bf52c80-e6ca-4a55-b795-f8fce8dba9c4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2870.716142] env[61215]: DEBUG oslo_vmware.api [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Task: {'id': task-1690474, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091357} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2870.716379] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2870.716562] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2870.716840] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2870.717138] env[61215]: INFO nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2870.719227] env[61215]: DEBUG nova.compute.claims [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2870.719402] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2870.719645] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2870.723730] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2870.774091] env[61215]: DEBUG oslo_vmware.rw_handles [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e3a6b22e-ff1d-4a2c-8f74-53ec5bb0d522/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2870.834718] env[61215]: DEBUG oslo_vmware.rw_handles [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2870.834910] env[61215]: DEBUG oslo_vmware.rw_handles [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e3a6b22e-ff1d-4a2c-8f74-53ec5bb0d522/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2874.180250] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f0405b-225b-4ec9-beb3-f667d07611ce {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2874.187788] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0635bdf8-f974-4597-82bd-646bb4c0d36b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2874.218394] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e22e08-75b4-46b9-b7f4-921e442b816c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2874.225821] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf9b592-27d4-42a4-8c0d-b50d7460c98e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2874.238765] env[61215]: DEBUG nova.compute.provider_tree [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2874.246963] env[61215]: DEBUG nova.scheduler.client.report [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2874.261806] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 3.542s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2874.262365] env[61215]: ERROR nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2874.262365] env[61215]: Faults: ['InvalidArgument'] [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Traceback (most recent call last): [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 
81e63102-75dc-4f4b-9b48-a63b2a9123f2] self.driver.spawn(context, instance, image_meta, [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] self._fetch_image_if_missing(context, vi) [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] image_cache(vi, tmp_image_ds_loc) [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] vm_util.copy_virtual_disk( [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] session._wait_for_task(vmdk_copy_task) [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] return self.wait_for_task(task_ref) [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] return evt.wait() [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] result = hub.switch() [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] return self.greenlet.switch() [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] self.f(*self.args, **self.kw) [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] raise exceptions.translate_fault(task_info.error) [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Faults: ['InvalidArgument'] [ 2874.262365] env[61215]: ERROR nova.compute.manager [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] [ 2874.263161] env[61215]: DEBUG nova.compute.utils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2874.264543] env[61215]: DEBUG nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Build of instance 81e63102-75dc-4f4b-9b48-a63b2a9123f2 was re-scheduled: A specified parameter was not correct: fileType [ 2874.264543] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2874.264911] env[61215]: DEBUG nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2874.265102] env[61215]: DEBUG nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2874.265282] env[61215]: DEBUG nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2874.265451] env[61215]: DEBUG nova.network.neutron [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2874.568343] env[61215]: DEBUG nova.network.neutron [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2874.585465] env[61215]: INFO nova.compute.manager [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Took 0.32 seconds to deallocate network for instance. [ 2874.684181] env[61215]: INFO nova.scheduler.client.report [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Deleted allocations for instance 81e63102-75dc-4f4b-9b48-a63b2a9123f2 [ 2874.713604] env[61215]: DEBUG oslo_concurrency.lockutils [None req-391eb06f-690c-4bb3-863e-e34b8a6ef7ac tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 546.214s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2874.713897] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 432.717s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2874.714153] env[61215]: INFO nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2874.714415] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2874.714984] env[61215]: DEBUG oslo_concurrency.lockutils [None req-72b6c2ba-30e2-48ef-9935-532d1f67ab15 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 349.778s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2874.715263] env[61215]: DEBUG oslo_concurrency.lockutils [None req-72b6c2ba-30e2-48ef-9935-532d1f67ab15 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2874.715516] env[61215]: DEBUG oslo_concurrency.lockutils [None req-72b6c2ba-30e2-48ef-9935-532d1f67ab15 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2874.715709] env[61215]: DEBUG oslo_concurrency.lockutils [None req-72b6c2ba-30e2-48ef-9935-532d1f67ab15 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2874.718324] env[61215]: INFO nova.compute.manager [None req-72b6c2ba-30e2-48ef-9935-532d1f67ab15 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Terminating instance [ 2874.720129] env[61215]: DEBUG nova.compute.manager [None req-72b6c2ba-30e2-48ef-9935-532d1f67ab15 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2874.720330] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-72b6c2ba-30e2-48ef-9935-532d1f67ab15 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2874.720592] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5a7270da-6974-4c3c-8dbc-63c8a3c3a998 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2874.730362] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607f3dbc-72ee-4499-bd55-49fd031105f4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2874.755030] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-72b6c2ba-30e2-48ef-9935-532d1f67ab15 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 81e63102-75dc-4f4b-9b48-a63b2a9123f2 could not be found. [ 2874.755240] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-72b6c2ba-30e2-48ef-9935-532d1f67ab15 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2874.755424] env[61215]: INFO nova.compute.manager [None req-72b6c2ba-30e2-48ef-9935-532d1f67ab15 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2874.755669] env[61215]: DEBUG oslo.service.loopingcall [None req-72b6c2ba-30e2-48ef-9935-532d1f67ab15 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2874.755884] env[61215]: DEBUG nova.compute.manager [-] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2874.755980] env[61215]: DEBUG nova.network.neutron [-] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2874.780820] env[61215]: DEBUG nova.network.neutron [-] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2874.787886] env[61215]: INFO nova.compute.manager [-] [instance: 81e63102-75dc-4f4b-9b48-a63b2a9123f2] Took 0.03 seconds to deallocate network for instance. 
[ 2874.877040] env[61215]: DEBUG oslo_concurrency.lockutils [None req-72b6c2ba-30e2-48ef-9935-532d1f67ab15 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "81e63102-75dc-4f4b-9b48-a63b2a9123f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.162s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2877.784805] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "039b406e-bc8c-41f9-a0d9-39d845b94a3f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2877.785132] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "039b406e-bc8c-41f9-a0d9-39d845b94a3f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2877.795564] env[61215]: DEBUG nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Starting instance... {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2877.842055] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2877.842382] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2877.843832] env[61215]: INFO nova.compute.claims [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2877.943323] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da802c3-c37e-49c6-b481-e0d5ef351c19 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2877.951191] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d7f933-376d-4cbf-b01f-176bd4e91d6a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2877.980762] env[61215]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f50ecd-598d-44d7-9b88-1b6c6a488a3f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2877.987993] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3342bf45-4aa8-440c-b199-9c4bdd5ce83c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2878.001039] env[61215]: DEBUG nova.compute.provider_tree [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2878.011037] env[61215]: DEBUG nova.scheduler.client.report [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2878.023779] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.181s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2878.024247] env[61215]: DEBUG nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2878.056781] env[61215]: DEBUG nova.compute.utils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2878.059074] env[61215]: DEBUG nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Allocating IP information in the background. 
{{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2878.059074] env[61215]: DEBUG nova.network.neutron [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2878.067996] env[61215]: DEBUG nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2878.134125] env[61215]: DEBUG nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Start spawning the instance on the hypervisor. {{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2878.161403] env[61215]: DEBUG nova.virt.hardware [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2878.161683] env[61215]: DEBUG nova.virt.hardware [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2878.161866] env[61215]: DEBUG nova.virt.hardware [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2878.162129] env[61215]: DEBUG nova.virt.hardware [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2878.162304] env[61215]: DEBUG nova.virt.hardware [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2878.162482] env[61215]: DEBUG nova.virt.hardware [None 
req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2878.162699] env[61215]: DEBUG nova.virt.hardware [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2878.162883] env[61215]: DEBUG nova.virt.hardware [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2878.163106] env[61215]: DEBUG nova.virt.hardware [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2878.163355] env[61215]: DEBUG nova.virt.hardware [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2878.163554] env[61215]: DEBUG nova.virt.hardware [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2878.164426] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff0ef40-2708-453c-9b5e-957187349883 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2878.168210] env[61215]: DEBUG nova.policy [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9db5492250b426c80f611d7a5686c1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3eac98da0cb41cbad12d92e9151b143', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2878.175491] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e12387-6353-4313-8b45-82eeb0e2052d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2878.486863] env[61215]: DEBUG nova.network.neutron [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 
tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Successfully created port: 8e2754fd-926d-4383-a170-4ff2d654e52e {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2879.047520] env[61215]: DEBUG nova.compute.manager [req-796a2dd2-6ddd-4313-aacd-144bbcfe8621 req-ce5d228a-11a2-4317-bb97-ec41a795948a service nova] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Received event network-vif-plugged-8e2754fd-926d-4383-a170-4ff2d654e52e {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2879.047757] env[61215]: DEBUG oslo_concurrency.lockutils [req-796a2dd2-6ddd-4313-aacd-144bbcfe8621 req-ce5d228a-11a2-4317-bb97-ec41a795948a service nova] Acquiring lock "039b406e-bc8c-41f9-a0d9-39d845b94a3f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2879.047953] env[61215]: DEBUG oslo_concurrency.lockutils [req-796a2dd2-6ddd-4313-aacd-144bbcfe8621 req-ce5d228a-11a2-4317-bb97-ec41a795948a service nova] Lock "039b406e-bc8c-41f9-a0d9-39d845b94a3f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2879.048146] env[61215]: DEBUG oslo_concurrency.lockutils [req-796a2dd2-6ddd-4313-aacd-144bbcfe8621 req-ce5d228a-11a2-4317-bb97-ec41a795948a service nova] Lock "039b406e-bc8c-41f9-a0d9-39d845b94a3f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2879.048328] env[61215]: DEBUG nova.compute.manager [req-796a2dd2-6ddd-4313-aacd-144bbcfe8621 req-ce5d228a-11a2-4317-bb97-ec41a795948a service nova] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] No waiting events found dispatching network-vif-plugged-8e2754fd-926d-4383-a170-4ff2d654e52e {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2879.048496] env[61215]: WARNING nova.compute.manager [req-796a2dd2-6ddd-4313-aacd-144bbcfe8621 req-ce5d228a-11a2-4317-bb97-ec41a795948a service nova] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Received unexpected event network-vif-plugged-8e2754fd-926d-4383-a170-4ff2d654e52e for instance with vm_state building and task_state spawning. 
[ 2879.104279] env[61215]: DEBUG nova.network.neutron [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Successfully updated port: 8e2754fd-926d-4383-a170-4ff2d654e52e {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2879.120276] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "refresh_cache-039b406e-bc8c-41f9-a0d9-39d845b94a3f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2879.120524] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "refresh_cache-039b406e-bc8c-41f9-a0d9-39d845b94a3f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2879.120746] env[61215]: DEBUG nova.network.neutron [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2879.163316] env[61215]: DEBUG nova.network.neutron [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2879.380700] env[61215]: DEBUG nova.network.neutron [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Updating instance_info_cache with network_info: [{"id": "8e2754fd-926d-4383-a170-4ff2d654e52e", "address": "fa:16:3e:f9:4e:0c", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e2754fd-92", "ovs_interfaceid": "8e2754fd-926d-4383-a170-4ff2d654e52e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2879.392543] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "refresh_cache-039b406e-bc8c-41f9-a0d9-39d845b94a3f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2879.392829] env[61215]: DEBUG nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Instance network_info: |[{"id": "8e2754fd-926d-4383-a170-4ff2d654e52e", "address": "fa:16:3e:f9:4e:0c", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e2754fd-92", "ovs_interfaceid": "8e2754fd-926d-4383-a170-4ff2d654e52e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2879.393261] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:4e:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e2754fd-926d-4383-a170-4ff2d654e52e', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2879.401500] env[61215]: DEBUG oslo.service.loopingcall [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2879.401982] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2879.402345] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-630be5f5-cb0b-4083-9bad-398172bec1a6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2879.422655] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2879.422655] env[61215]: value = "task-1690475" [ 2879.422655] env[61215]: _type = "Task" [ 2879.422655] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2879.430340] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690475, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2879.932595] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690475, 'name': CreateVM_Task} progress is 99%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2880.433087] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690475, 'name': CreateVM_Task, 'duration_secs': 0.678246} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2880.433361] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2880.433963] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2880.434152] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2880.434523] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2880.434786] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c34d165f-ddf8-46d9-b130-aec201728051 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2880.439524] env[61215]: DEBUG oslo_vmware.api [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 2880.439524] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]527931cc-cc9e-2902-7e78-bb457ef8bada" [ 2880.439524] env[61215]: _type = "Task" [ 2880.439524] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2880.448453] env[61215]: DEBUG oslo_vmware.api [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]527931cc-cc9e-2902-7e78-bb457ef8bada, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2880.950252] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2880.950541] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2880.950754] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2881.075780] env[61215]: DEBUG nova.compute.manager [req-5a033a5d-5960-4e8e-a9a3-44f3e1049de8 req-9bb74381-9a27-4bea-b439-99f6df7b40fc service nova] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Received event network-changed-8e2754fd-926d-4383-a170-4ff2d654e52e {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2881.075947] env[61215]: DEBUG nova.compute.manager [req-5a033a5d-5960-4e8e-a9a3-44f3e1049de8 req-9bb74381-9a27-4bea-b439-99f6df7b40fc service nova] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Refreshing instance network info cache due to event network-changed-8e2754fd-926d-4383-a170-4ff2d654e52e. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2881.076236] env[61215]: DEBUG oslo_concurrency.lockutils [req-5a033a5d-5960-4e8e-a9a3-44f3e1049de8 req-9bb74381-9a27-4bea-b439-99f6df7b40fc service nova] Acquiring lock "refresh_cache-039b406e-bc8c-41f9-a0d9-39d845b94a3f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2881.076389] env[61215]: DEBUG oslo_concurrency.lockutils [req-5a033a5d-5960-4e8e-a9a3-44f3e1049de8 req-9bb74381-9a27-4bea-b439-99f6df7b40fc service nova] Acquired lock "refresh_cache-039b406e-bc8c-41f9-a0d9-39d845b94a3f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2881.076552] env[61215]: DEBUG nova.network.neutron [req-5a033a5d-5960-4e8e-a9a3-44f3e1049de8 req-9bb74381-9a27-4bea-b439-99f6df7b40fc service nova] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Refreshing network info cache for port 8e2754fd-926d-4383-a170-4ff2d654e52e {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2881.317155] env[61215]: DEBUG nova.network.neutron [req-5a033a5d-5960-4e8e-a9a3-44f3e1049de8 req-9bb74381-9a27-4bea-b439-99f6df7b40fc service nova] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Updated VIF entry in instance network info cache for port 8e2754fd-926d-4383-a170-4ff2d654e52e. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2881.317581] env[61215]: DEBUG nova.network.neutron [req-5a033a5d-5960-4e8e-a9a3-44f3e1049de8 req-9bb74381-9a27-4bea-b439-99f6df7b40fc service nova] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Updating instance_info_cache with network_info: [{"id": "8e2754fd-926d-4383-a170-4ff2d654e52e", "address": "fa:16:3e:f9:4e:0c", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e2754fd-92", "ovs_interfaceid": "8e2754fd-926d-4383-a170-4ff2d654e52e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2881.327933] env[61215]: DEBUG oslo_concurrency.lockutils [req-5a033a5d-5960-4e8e-a9a3-44f3e1049de8 req-9bb74381-9a27-4bea-b439-99f6df7b40fc service nova] Releasing lock "refresh_cache-039b406e-bc8c-41f9-a0d9-39d845b94a3f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2882.663728] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2888.654629] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2892.655680] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2893.654659] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2893.654843] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2893.655065] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2893.667630] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2893.667986] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2893.668051] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2893.668192] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2893.669308] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8199ff1-097c-4722-8df4-8ec7c6b33ea4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2893.678009] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d0d505-2f89-4a04-93d3-2cd9a4b3bc33 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2893.691994] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b97678-0ed2-4e23-aa01-bb5143d5a4c1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2893.698199] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd69cdd-9c40-45e6-8fd5-f9d50ece4169 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2893.727741] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181323MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2893.727903] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2893.728083] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2893.787817] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance e13fe4b8-f445-46f6-a896-8db6fd85fa71 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2893.787982] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bc18b836-2fdc-4750-8720-b5b5433fec84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2893.788126] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2893.788249] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 039b406e-bc8c-41f9-a0d9-39d845b94a3f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2893.788427] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2893.788584] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2893.846257] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1eab2f-6992-4300-bcdf-48fee3ae896d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2893.853754] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c356297-e1a5-43c9-8df9-7aecd1f39eda {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2893.882865] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0f88b7-7728-4f5a-8f86-04304dad5bbc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2893.889702] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39195d46-e5f6-44d8-983c-4f79173fe4b3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2893.902390] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2893.910352] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2893.923530] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2893.923708] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.196s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2894.923418] env[61215]: DEBUG oslo_service.periodic_task [None 
req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2895.654426] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2897.654913] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2897.655305] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2897.655305] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2897.668912] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2897.669081] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2897.669217] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2897.669347] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2897.669469] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2898.654627] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2905.651510] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2916.485338] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2237b6aa-b22f-4233-b44c-4d5f2e96f289 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "8ca1c8ea-ce6a-4945-873b-aa0e6489baf6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2919.601844] env[61215]: WARNING oslo_vmware.rw_handles [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2919.601844] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2919.601844] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2919.601844] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2919.601844] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2919.601844] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2919.601844] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2919.601844] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2919.601844] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2919.601844] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2919.601844] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2919.601844] env[61215]: ERROR oslo_vmware.rw_handles [ 2919.602752] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/e3a6b22e-ff1d-4a2c-8f74-53ec5bb0d522/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2919.604354] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2919.604596] env[61215]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Copying Virtual Disk [datastore1] vmware_temp/e3a6b22e-ff1d-4a2c-8f74-53ec5bb0d522/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/e3a6b22e-ff1d-4a2c-8f74-53ec5bb0d522/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2919.604881] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb49fce6-96af-453d-9044-8607d4447f2e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2919.614168] env[61215]: DEBUG oslo_vmware.api [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 2919.614168] env[61215]: value = "task-1690476" [ 2919.614168] env[61215]: _type = "Task" [ 2919.614168] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2919.621868] env[61215]: DEBUG oslo_vmware.api [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': task-1690476, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2920.125088] env[61215]: DEBUG oslo_vmware.exceptions [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Fault InvalidArgument not matched. 
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2920.125088] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2920.125647] env[61215]: ERROR nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2920.125647] env[61215]: Faults: ['InvalidArgument'] [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Traceback (most recent call last): [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] yield resources [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] self.driver.spawn(context, instance, image_meta, [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] self._fetch_image_if_missing(context, vi) [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] image_cache(vi, tmp_image_ds_loc) [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] vm_util.copy_virtual_disk( [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] session._wait_for_task(vmdk_copy_task) [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] return self.wait_for_task(task_ref) [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] return evt.wait() [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] result = hub.switch() [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] return self.greenlet.switch() [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] self.f(*self.args, **self.kw) [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] raise exceptions.translate_fault(task_info.error) [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Faults: ['InvalidArgument'] [ 2920.125647] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] [ 2920.126566] env[61215]: INFO nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Terminating instance [ 2920.127573] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2920.127791] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2920.128041] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a380acb-aa1e-46ed-8997-d6075a14834c {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.130127] env[61215]: DEBUG nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2920.130328] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2920.131057] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bec76cc-7875-4bfe-b57b-e7513d28272e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.137594] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2920.137799] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-497a80c4-e926-4198-9be8-dde3808be469 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.139792] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2920.139967] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2920.140926] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4e0ef7e-0c40-4443-9c0a-55a7c26cac0c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.145250] env[61215]: DEBUG oslo_vmware.api [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for the task: (returnval){ [ 2920.145250] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]528eafb3-0974-e134-1d21-51ee801379b9" [ 2920.145250] env[61215]: _type = "Task" [ 2920.145250] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2920.151980] env[61215]: DEBUG oslo_vmware.api [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]528eafb3-0974-e134-1d21-51ee801379b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2920.208056] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2920.208268] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2920.208454] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Deleting the datastore file [datastore1] e13fe4b8-f445-46f6-a896-8db6fd85fa71 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2920.208766] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-927bbd53-2616-445d-b851-5ff29350bbbb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.214683] env[61215]: DEBUG oslo_vmware.api [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 2920.214683] env[61215]: value = "task-1690478" [ 2920.214683] env[61215]: _type = "Task" [ 2920.214683] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2920.221885] env[61215]: DEBUG oslo_vmware.api [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': task-1690478, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2920.656063] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2920.656484] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Creating directory with path [datastore1] vmware_temp/f2f593b8-d65f-4c28-af02-2a6327ca194f/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2920.656549] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22d4299d-a1d2-4e28-9b7b-2a9a88a66a5b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.667638] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Created directory with path [datastore1] vmware_temp/f2f593b8-d65f-4c28-af02-2a6327ca194f/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2920.667860] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Fetch image to [datastore1] vmware_temp/f2f593b8-d65f-4c28-af02-2a6327ca194f/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2920.668063] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/f2f593b8-d65f-4c28-af02-2a6327ca194f/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2920.668769] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59e7490-9249-49ee-a4d0-e6cd1cf73e27 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.675337] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9290392f-571a-4074-91bd-c1f33aecf71e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.684095] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8220556a-217e-4d52-b8b9-506f44c284ff {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.715556] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb0ac1f-a595-4fcc-9e08-fbb84b5d7bfc {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.725634] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6d6cbcf0-d0c0-45be-be59-8fd0ae3753c1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.727251] env[61215]: DEBUG oslo_vmware.api [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': task-1690478, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087939} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2920.727484] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2920.727664] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2920.727835] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2920.728082] env[61215]: INFO nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Took 0.60 seconds to destroy the instance on the hypervisor. 
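The records above show the vmwareapi driver's task pattern end to end: a vSphere task method is invoked (SearchDatastore_Task, UnregisterVM, DeleteDatastoreFile_Task), then polled through wait_for_task until vCenter reports completion, with the "progress is 0%" lines emitted by each _poll_task iteration. What follows is a minimal sketch of that pattern against oslo.vmware's public API; the vCenter host, credentials, and datastore path are illustrative placeholders, not values recovered from this log.

    from oslo_vmware import api as vmware_api
    from oslo_vmware import exceptions as vmware_exceptions
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials; task_poll_interval controls
    # how often wait_for_task() polls, mirroring the progress lines above.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # DeleteDatastoreFile_Task needs a datacenter ref; fetch the first one.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'Datacenter', 1)
    dc_ref = result.objects[0].obj

    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] vmware_temp/example.vmdk',  # placeholder path
        datacenter=dc_ref)

    try:
        # Blocks until the task succeeds; raises a translated exception
        # (e.g. VimFaultException) if task_info.error is set, as in the
        # InvalidArgument failure recorded earlier in this log.
        session.wait_for_task(task)
    except vmware_exceptions.VimFaultException as exc:
        print('task failed:', exc.fault_list, exc)
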
[ 2920.730161] env[61215]: DEBUG nova.compute.claims [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2920.730334] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2920.730551] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2920.752358] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2920.804267] env[61215]: DEBUG oslo_vmware.rw_handles [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f2f593b8-d65f-4c28-af02-2a6327ca194f/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2920.868176] env[61215]: DEBUG oslo_vmware.rw_handles [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2920.868369] env[61215]: DEBUG oslo_vmware.rw_handles [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f2f593b8-d65f-4c28-af02-2a6327ca194f/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2920.876694] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d33b0b6-8152-426e-b341-fcf3e1532ca1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.884434] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a4f99fa-8845-4fb5-921b-aa098f0fd609 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.917075] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4780fb3b-8dbd-49a1-be0f-58dcf6c1166b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.923207] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33af498-6574-4481-8544-d9b8c832360d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2920.935986] env[61215]: DEBUG nova.compute.provider_tree [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2920.944547] env[61215]: DEBUG nova.scheduler.client.report [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2920.958161] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.227s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2920.958701] env[61215]: ERROR nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2920.958701] env[61215]: Faults: ['InvalidArgument'] [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Traceback (most recent call last): [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2920.958701] env[61215]: ERROR 
nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] self.driver.spawn(context, instance, image_meta, [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] self._fetch_image_if_missing(context, vi) [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] image_cache(vi, tmp_image_ds_loc) [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] vm_util.copy_virtual_disk( [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] session._wait_for_task(vmdk_copy_task) [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] return self.wait_for_task(task_ref) [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] return evt.wait() [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] result = hub.switch() [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] return self.greenlet.switch() [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] self.f(*self.args, **self.kw) [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] raise exceptions.translate_fault(task_info.error) [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Faults: ['InvalidArgument'] [ 2920.958701] env[61215]: ERROR nova.compute.manager [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] [ 2920.959509] env[61215]: DEBUG nova.compute.utils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2920.961140] env[61215]: DEBUG nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Build of instance e13fe4b8-f445-46f6-a896-8db6fd85fa71 was re-scheduled: A specified parameter was not correct: fileType [ 2920.961140] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2920.962340] env[61215]: DEBUG nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2920.962340] env[61215]: DEBUG nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2920.962340] env[61215]: DEBUG nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2920.962340] env[61215]: DEBUG nova.network.neutron [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2921.258480] env[61215]: DEBUG nova.network.neutron [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2921.271602] env[61215]: INFO nova.compute.manager [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Took 0.31 seconds to deallocate network for instance. [ 2921.373038] env[61215]: INFO nova.scheduler.client.report [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Deleted allocations for instance e13fe4b8-f445-46f6-a896-8db6fd85fa71 [ 2921.394942] env[61215]: DEBUG oslo_concurrency.lockutils [None req-934821a6-9547-4367-bd19-8ee7f47b2306 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "e13fe4b8-f445-46f6-a896-8db6fd85fa71" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 437.552s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2921.395231] env[61215]: DEBUG oslo_concurrency.lockutils [None req-725f7bb5-a0d1-4ac9-b25b-a38482772ce2 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "e13fe4b8-f445-46f6-a896-8db6fd85fa71" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 241.100s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2921.395456] env[61215]: DEBUG oslo_concurrency.lockutils [None req-725f7bb5-a0d1-4ac9-b25b-a38482772ce2 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "e13fe4b8-f445-46f6-a896-8db6fd85fa71-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2921.395664] env[61215]: DEBUG oslo_concurrency.lockutils [None req-725f7bb5-a0d1-4ac9-b25b-a38482772ce2 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "e13fe4b8-f445-46f6-a896-8db6fd85fa71-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s 
{{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2921.395837] env[61215]: DEBUG oslo_concurrency.lockutils [None req-725f7bb5-a0d1-4ac9-b25b-a38482772ce2 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "e13fe4b8-f445-46f6-a896-8db6fd85fa71-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2921.398925] env[61215]: INFO nova.compute.manager [None req-725f7bb5-a0d1-4ac9-b25b-a38482772ce2 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Terminating instance [ 2921.400960] env[61215]: DEBUG nova.compute.manager [None req-725f7bb5-a0d1-4ac9-b25b-a38482772ce2 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2921.401185] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-725f7bb5-a0d1-4ac9-b25b-a38482772ce2 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2921.401448] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-337adcb1-22a3-4e6e-83c8-f55c06d14f61 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2921.410295] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31998365-aa37-445c-b40d-cca3649c7b17 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2921.433694] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-725f7bb5-a0d1-4ac9-b25b-a38482772ce2 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e13fe4b8-f445-46f6-a896-8db6fd85fa71 could not be found. [ 2921.433902] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-725f7bb5-a0d1-4ac9-b25b-a38482772ce2 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2921.434096] env[61215]: INFO nova.compute.manager [None req-725f7bb5-a0d1-4ac9-b25b-a38482772ce2 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Took 0.03 seconds to destroy the instance on the hypervisor. [ 2921.434338] env[61215]: DEBUG oslo.service.loopingcall [None req-725f7bb5-a0d1-4ac9-b25b-a38482772ce2 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2921.434552] env[61215]: DEBUG nova.compute.manager [-] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2921.434649] env[61215]: DEBUG nova.network.neutron [-] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2921.459622] env[61215]: DEBUG nova.network.neutron [-] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2921.467262] env[61215]: INFO nova.compute.manager [-] [instance: e13fe4b8-f445-46f6-a896-8db6fd85fa71] Took 0.03 seconds to deallocate network for instance. [ 2921.546338] env[61215]: DEBUG oslo_concurrency.lockutils [None req-725f7bb5-a0d1-4ac9-b25b-a38482772ce2 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "e13fe4b8-f445-46f6-a896-8db6fd85fa71" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.151s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2944.024316] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2949.654601] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2953.656513] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2953.656885] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2953.668137] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2953.668356] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2953.668518] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s 
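Annotation: the "Running periodic task ComputeManager._check_instance_build_time" lines that follow come from oslo.service's periodic task machinery: methods decorated with @periodic_task.periodic_task are collected by a PeriodicTasks subclass and fired by run_periodic_tasks() when due. A small sketch under that assumption (class and method names are illustrative):

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        # spacing: minimum seconds between runs; run_periodic_tasks()
        # logs "Running periodic task ..." each time a task is due.
        @periodic_task.periodic_task(spacing=60)
        def _check_something(self, context):
            pass

    Manager().run_periodic_tasks(context=None)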
{{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2953.668676] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61215) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2953.669814] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cba7cc1-3844-45c7-b55b-2f203f4b23d5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.678554] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49cbd367-2ad5-468b-8cf8-b7706b5251a7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.691979] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03975a3d-1c07-4b17-8a55-ed1fd41747c0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.697776] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63402d4-1ff1-4494-9a8e-a860f23de439 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.726421] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181323MB free_disk=173GB free_vcpus=48 pci_devices=None {{(pid=61215) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2953.726555] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2953.726746] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2953.777025] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance bc18b836-2fdc-4750-8720-b5b5433fec84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2953.777197] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2953.777331] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Instance 039b406e-bc8c-41f9-a0d9-39d845b94a3f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61215) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2953.777510] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2953.777656] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61215) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2953.827602] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d35933b-4ef2-42f2-83f8-2448a2a55d6c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.834941] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c0c43b-4f07-42fa-ab43-2e2a3f81f6ff {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.866132] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4815ca-2a07-4ef3-8e8f-4bb028bcc286 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.873165] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ea86b6-5b57-4bcd-927c-a6ffb0f5b6ab {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2953.885785] env[61215]: DEBUG nova.compute.provider_tree [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2953.893747] env[61215]: DEBUG nova.scheduler.client.report [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2953.909280] env[61215]: DEBUG nova.compute.resource_tracker [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61215) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2953.909400] 
env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.183s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2955.907589] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2955.908038] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61215) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10539}} [ 2956.655494] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2957.654248] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2959.655684] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2959.656097] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Starting heal instance info cache {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9920}} [ 2959.656097] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Rebuilding the list of instances to heal {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2959.668393] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2959.668546] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2959.668680] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Skipping network cache update for instance because it is Building. {{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2959.668807] env[61215]: DEBUG nova.compute.manager [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Didn't find any instances for network info cache update. 
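Annotation: the resource audit above can be reconciled by hand: placement's schedulable capacity per resource class is (total - reserved) * allocation_ratio, and the three Building instances each hold {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}. A worked check against the figures in this log:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Schedulable capacity as placement computes it.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
    # used_ram=896MB in the "Final resource view" line is the 512MB
    # reservation plus 3 * 128MB; used_vcpus=3 and used_disk=3GB are
    # 3 * 1 VCPU and 3 * 1 DISK_GB.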
{{(pid=61215) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10006}} [ 2960.654610] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2966.577319] env[61215]: WARNING oslo_vmware.rw_handles [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2966.577319] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2966.577319] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2966.577319] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2966.577319] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2966.577319] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 2966.577319] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2966.577319] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2966.577319] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2966.577319] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2966.577319] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2966.577319] env[61215]: ERROR oslo_vmware.rw_handles [ 2966.577998] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/f2f593b8-d65f-4c28-af02-2a6327ca194f/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2966.579639] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2966.579884] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Copying Virtual Disk [datastore1] vmware_temp/f2f593b8-d65f-4c28-af02-2a6327ca194f/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/f2f593b8-d65f-4c28-af02-2a6327ca194f/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2966.580180] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-11f43341-fda3-404e-a5ea-3da675925403 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2966.588414] env[61215]: DEBUG oslo_vmware.api [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for the task: (returnval){ [ 2966.588414] env[61215]: value = "task-1690479" [ 2966.588414] env[61215]: _type = "Task" [ 2966.588414] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2966.596199] env[61215]: DEBUG oslo_vmware.api [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': task-1690479, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2967.099352] env[61215]: DEBUG oslo_vmware.exceptions [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Fault InvalidArgument not matched. {{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2967.099491] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2967.099937] env[61215]: ERROR nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2967.099937] env[61215]: Faults: ['InvalidArgument'] [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Traceback (most recent call last): [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] yield resources [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] self.driver.spawn(context, instance, image_meta, [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] self._fetch_image_if_missing(context, vi) [ 2967.099937] env[61215]: ERROR nova.compute.manager 
[instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] image_cache(vi, tmp_image_ds_loc) [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] vm_util.copy_virtual_disk( [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] session._wait_for_task(vmdk_copy_task) [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] return self.wait_for_task(task_ref) [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] return evt.wait() [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] result = hub.switch() [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] return self.greenlet.switch() [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] self.f(*self.args, **self.kw) [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] raise exceptions.translate_fault(task_info.error) [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Faults: ['InvalidArgument'] [ 2967.099937] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] [ 2967.100998] env[61215]: INFO nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] 
[instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Terminating instance [ 2967.101922] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2967.102152] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2967.102401] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f842bae9-2ea7-4753-ba97-05d96db11632 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.104731] env[61215]: DEBUG nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2967.104934] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2967.105686] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91931051-535e-46ba-b437-4b48d708a120 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.112995] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2967.113260] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e9ec200-5862-45d2-8a49-412d49cdc295 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.115347] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2967.115525] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Folder [datastore1] devstack-image-cache_base created. 
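Annotation: in the traceback above, _poll_task raises exceptions.translate_fault(task_info.error); because the 'InvalidArgument' fault name is not mapped to a dedicated exception class (the "Fault InvalidArgument not matched" DEBUG line), the caller sees a generic oslo_vmware VimFaultException whose fault_list carries the raw fault names. A hedged sketch of inspecting it from calling code (the wrapper function is ours, not Nova's):

    from oslo_vmware import exceptions as vexc

    def copy_disk_checked(session, task_ref):
        # session.wait_for_task() blocks on the vCenter task and raises
        # the translated fault on error, as in the traceback above.
        try:
            return session.wait_for_task(task_ref)
        except vexc.VimFaultException as exc:
            if 'InvalidArgument' in (exc.fault_list or []):
                # e.g. "A specified parameter was not correct: fileType"
                raise RuntimeError('bad CopyVirtualDisk spec: %s' % exc)
            raise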
{{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2967.116513] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c22e8bb1-36c9-4b0b-aa53-387f2f083d74 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.121706] env[61215]: DEBUG oslo_vmware.api [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for the task: (returnval){ [ 2967.121706] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e14e08-ad24-847b-53b4-6634000388db" [ 2967.121706] env[61215]: _type = "Task" [ 2967.121706] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2967.130241] env[61215]: DEBUG oslo_vmware.api [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e14e08-ad24-847b-53b4-6634000388db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2967.184602] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2967.184820] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2967.185018] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Deleting the datastore file [datastore1] bc18b836-2fdc-4750-8720-b5b5433fec84 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2967.185310] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57f1e069-b9d1-4c0a-bbeb-d05e3d7c24a9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.191236] env[61215]: DEBUG oslo_vmware.api [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for the task: (returnval){ [ 2967.191236] env[61215]: value = "task-1690481" [ 2967.191236] env[61215]: _type = "Task" [ 2967.191236] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2967.199286] env[61215]: DEBUG oslo_vmware.api [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': task-1690481, 'name': DeleteDatastoreFile_Task} progress is 0%. 
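Annotation: the "[datastore1] devstack-image-cache_base/..." strings in the lock names and ds_util calls above are vSphere datastore paths. Assuming oslo.vmware's DatastorePath helper (which Nova's ds_util builds on), they can be constructed and parsed like this:

    from oslo_vmware.objects.datastore import DatastorePath

    image_id = 'e91f0c25-9ff9-4937-8440-f47cfb2028bc'
    path = DatastorePath('datastore1', 'devstack-image-cache_base',
                         image_id, image_id + '.vmdk')
    print(str(path))       # "[datastore1] devstack-image-cache_base/..."

    parsed = DatastorePath.parse(str(path))
    print(parsed.datastore, parsed.rel_path)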
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2967.631993] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2967.632350] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Creating directory with path [datastore1] vmware_temp/403b43ee-6690-40d8-9e85-d6fd5f4f3850/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2967.632499] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41499b8d-9721-4080-95c2-a17c420d8b23 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.644183] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Created directory with path [datastore1] vmware_temp/403b43ee-6690-40d8-9e85-d6fd5f4f3850/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2967.644365] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Fetch image to [datastore1] vmware_temp/403b43ee-6690-40d8-9e85-d6fd5f4f3850/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2967.644537] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/403b43ee-6690-40d8-9e85-d6fd5f4f3850/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2967.645244] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9bcdac7-4090-4840-a943-27d36e4ed149 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.651604] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982257ac-1e9f-47b5-b64b-b1b84537090d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.660435] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed56b70-fc39-48d8-aa29-e0ca951346eb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.691350] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f987d072-d1c3-4072-8819-75399a06cab7 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.701875] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1534877c-9b0f-4ec2-9673-f3e3fc6824c6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.703466] env[61215]: DEBUG oslo_vmware.api [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Task: {'id': task-1690481, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075815} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2967.703702] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2967.703885] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2967.704074] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2967.704257] env[61215]: INFO nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2967.706364] env[61215]: DEBUG nova.compute.claims [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2967.706543] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2967.706753] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2967.725158] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2967.781579] env[61215]: DEBUG oslo_vmware.rw_handles [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/403b43ee-6690-40d8-9e85-d6fd5f4f3850/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2967.843652] env[61215]: DEBUG oslo_vmware.rw_handles [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2967.843835] env[61215]: DEBUG oslo_vmware.rw_handles [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/403b43ee-6690-40d8-9e85-d6fd5f4f3850/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
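Annotation: the write handle above streams the 21318656-byte sparse image over the vCenter/ESX HTTPS file service; the URL layout (host, /folder/<path on datastore>, dcPath and dsName query parameters) is visible in the log line itself. A sketch that rebuilds a URL of that shape (the helper name is ours; oslo.vmware assembles the equivalent inside rw_handles):

    from urllib.parse import quote, urlencode

    def datastore_file_url(host, rel_path, dc_path, ds_name, port=443):
        # https://<host>:<port>/folder/<path>?dcPath=<dc>&dsName=<ds>
        query = urlencode({'dcPath': dc_path, 'dsName': ds_name})
        return 'https://%s:%d/folder/%s?%s' % (host, port,
                                               quote(rel_path), query)

    print(datastore_file_url(
        'esx7c1n3.openstack.eu-de-1.cloud.sap',
        'vmware_temp/403b43ee-6690-40d8-9e85-d6fd5f4f3850/'
        'e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk',
        'ha-datacenter', 'datastore1'))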
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2967.852084] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f49d04b1-7140-4917-9531-f883023c3261 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.859618] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab30b9d-b045-49e4-b338-45e7da20d695 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.891096] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784a7043-bf23-4536-9c70-07e14864e973 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.898222] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7195296-518e-411b-9166-5fec6f1e1604 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2967.911758] env[61215]: DEBUG nova.compute.provider_tree [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2967.920137] env[61215]: DEBUG nova.scheduler.client.report [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2967.934420] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.228s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2967.934932] env[61215]: ERROR nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2967.934932] env[61215]: Faults: ['InvalidArgument'] [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Traceback (most recent call last): [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: 
bc18b836-2fdc-4750-8720-b5b5433fec84] self.driver.spawn(context, instance, image_meta, [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] self._fetch_image_if_missing(context, vi) [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] image_cache(vi, tmp_image_ds_loc) [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] vm_util.copy_virtual_disk( [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] session._wait_for_task(vmdk_copy_task) [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] return self.wait_for_task(task_ref) [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] return evt.wait() [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] result = hub.switch() [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] return self.greenlet.switch() [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] self.f(*self.args, **self.kw) [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] raise exceptions.translate_fault(task_info.error) [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Faults: ['InvalidArgument'] [ 2967.934932] env[61215]: ERROR nova.compute.manager [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] [ 2967.935786] env[61215]: DEBUG nova.compute.utils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2967.937060] env[61215]: DEBUG nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Build of instance bc18b836-2fdc-4750-8720-b5b5433fec84 was re-scheduled: A specified parameter was not correct: fileType [ 2967.937060] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 2967.937432] env[61215]: DEBUG nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 2967.937611] env[61215]: DEBUG nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 2967.937787] env[61215]: DEBUG nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2967.937968] env[61215]: DEBUG nova.network.neutron [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2968.207533] env[61215]: DEBUG nova.network.neutron [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2968.223024] env[61215]: INFO nova.compute.manager [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Took 0.28 seconds to deallocate network for instance. [ 2968.309046] env[61215]: INFO nova.scheduler.client.report [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Deleted allocations for instance bc18b836-2fdc-4750-8720-b5b5433fec84 [ 2968.332141] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b2c44339-a36d-4392-92aa-b05f0e83fa20 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "bc18b836-2fdc-4750-8720-b5b5433fec84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 301.109s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2968.332447] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b0990d90-15d6-40cd-9e3d-71d08a9eddd7 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "bc18b836-2fdc-4750-8720-b5b5433fec84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 103.622s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2968.332674] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b0990d90-15d6-40cd-9e3d-71d08a9eddd7 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Acquiring lock "bc18b836-2fdc-4750-8720-b5b5433fec84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2968.332884] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b0990d90-15d6-40cd-9e3d-71d08a9eddd7 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "bc18b836-2fdc-4750-8720-b5b5433fec84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2968.333095] env[61215]: 
DEBUG oslo_concurrency.lockutils [None req-b0990d90-15d6-40cd-9e3d-71d08a9eddd7 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "bc18b836-2fdc-4750-8720-b5b5433fec84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2968.335344] env[61215]: INFO nova.compute.manager [None req-b0990d90-15d6-40cd-9e3d-71d08a9eddd7 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Terminating instance [ 2968.337082] env[61215]: DEBUG nova.compute.manager [None req-b0990d90-15d6-40cd-9e3d-71d08a9eddd7 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 2968.337392] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b0990d90-15d6-40cd-9e3d-71d08a9eddd7 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2968.337779] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8c0c654a-5886-4e5e-9d3a-00e1defc959d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2968.348948] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2a7afc-2ceb-4df3-b13f-b23ffbe36787 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2968.372814] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-b0990d90-15d6-40cd-9e3d-71d08a9eddd7 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bc18b836-2fdc-4750-8720-b5b5433fec84 could not be found. [ 2968.373124] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-b0990d90-15d6-40cd-9e3d-71d08a9eddd7 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2968.373328] env[61215]: INFO nova.compute.manager [None req-b0990d90-15d6-40cd-9e3d-71d08a9eddd7 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2968.373575] env[61215]: DEBUG oslo.service.loopingcall [None req-b0990d90-15d6-40cd-9e3d-71d08a9eddd7 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2968.373782] env[61215]: DEBUG nova.compute.manager [-] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 2968.373880] env[61215]: DEBUG nova.network.neutron [-] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2968.397918] env[61215]: DEBUG nova.network.neutron [-] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2968.405643] env[61215]: INFO nova.compute.manager [-] [instance: bc18b836-2fdc-4750-8720-b5b5433fec84] Took 0.03 seconds to deallocate network for instance. [ 2968.487145] env[61215]: DEBUG oslo_concurrency.lockutils [None req-b0990d90-15d6-40cd-9e3d-71d08a9eddd7 tempest-ImagesTestJSON-1438693841 tempest-ImagesTestJSON-1438693841-project-member] Lock "bc18b836-2fdc-4750-8720-b5b5433fec84" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.155s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2974.329469] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Acquiring lock "2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2974.329782] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Lock "2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2974.340377] env[61215]: DEBUG nova.compute.manager [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 2974.391278] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2974.391529] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2974.393112] env[61215]: INFO nova.compute.claims [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2974.484397] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2cfa21-1e0a-428c-8449-933f178e5f93 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2974.491702] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d588720d-a65b-429e-8ad1-ae4c09d9ca80 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2974.521284] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc1d20b-3c11-4986-945c-69022d8bb220 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2974.528893] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be499bc2-e1e5-4ab2-b858-9e7340a8c86d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2974.543975] env[61215]: DEBUG nova.compute.provider_tree [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2974.553833] env[61215]: DEBUG nova.scheduler.client.report [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2974.568508] env[61215]: DEBUG 
oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.177s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2974.568979] env[61215]: DEBUG nova.compute.manager [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 2974.601729] env[61215]: DEBUG nova.compute.utils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2974.603411] env[61215]: DEBUG nova.compute.manager [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 2974.603636] env[61215]: DEBUG nova.network.neutron [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2974.613248] env[61215]: DEBUG nova.compute.manager [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 2974.680943] env[61215]: DEBUG nova.compute.manager [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 2974.704436] env[61215]: DEBUG nova.policy [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'df7a1f97c1664c0da46650d6397cb0a3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '581f84a28d89415d9d9b12cad2fa8e10', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 2974.711061] env[61215]: DEBUG nova.virt.hardware [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2974.711311] env[61215]: DEBUG nova.virt.hardware [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2974.711474] env[61215]: DEBUG nova.virt.hardware [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2974.711658] env[61215]: DEBUG nova.virt.hardware [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2974.711810] env[61215]: DEBUG nova.virt.hardware [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2974.711963] env[61215]: DEBUG nova.virt.hardware [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2974.712196] 
env[61215]: DEBUG nova.virt.hardware [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2974.712359] env[61215]: DEBUG nova.virt.hardware [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2974.712527] env[61215]: DEBUG nova.virt.hardware [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2974.713024] env[61215]: DEBUG nova.virt.hardware [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2974.713024] env[61215]: DEBUG nova.virt.hardware [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2974.713752] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33eb920c-54b6-4432-b7dd-da2eb4ff0d1d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2974.721759] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ab1f54-c584-4741-9be1-d06567d5c6c3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2975.008634] env[61215]: DEBUG nova.network.neutron [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Successfully created port: 60820ed4-2123-4e55-ba61-901b23b0b290 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2976.023363] env[61215]: DEBUG nova.compute.manager [req-658ab208-3ec2-4fe8-ab16-95bb6d57cd71 req-641681d2-d60c-4a46-b7ae-43100e74c662 service nova] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Received event network-vif-plugged-60820ed4-2123-4e55-ba61-901b23b0b290 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2976.023363] env[61215]: DEBUG oslo_concurrency.lockutils [req-658ab208-3ec2-4fe8-ab16-95bb6d57cd71 req-641681d2-d60c-4a46-b7ae-43100e74c662 service nova] Acquiring lock "2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2976.023363] env[61215]: DEBUG oslo_concurrency.lockutils 
[req-658ab208-3ec2-4fe8-ab16-95bb6d57cd71 req-641681d2-d60c-4a46-b7ae-43100e74c662 service nova] Lock "2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2976.023363] env[61215]: DEBUG oslo_concurrency.lockutils [req-658ab208-3ec2-4fe8-ab16-95bb6d57cd71 req-641681d2-d60c-4a46-b7ae-43100e74c662 service nova] Lock "2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2976.023363] env[61215]: DEBUG nova.compute.manager [req-658ab208-3ec2-4fe8-ab16-95bb6d57cd71 req-641681d2-d60c-4a46-b7ae-43100e74c662 service nova] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] No waiting events found dispatching network-vif-plugged-60820ed4-2123-4e55-ba61-901b23b0b290 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2976.024355] env[61215]: WARNING nova.compute.manager [req-658ab208-3ec2-4fe8-ab16-95bb6d57cd71 req-641681d2-d60c-4a46-b7ae-43100e74c662 service nova] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Received unexpected event network-vif-plugged-60820ed4-2123-4e55-ba61-901b23b0b290 for instance with vm_state building and task_state spawning. [ 2976.103993] env[61215]: DEBUG nova.network.neutron [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Successfully updated port: 60820ed4-2123-4e55-ba61-901b23b0b290 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2976.116291] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Acquiring lock "refresh_cache-2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2976.116291] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Acquired lock "refresh_cache-2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2976.116291] env[61215]: DEBUG nova.network.neutron [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2976.154662] env[61215]: DEBUG nova.network.neutron [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2976.363296] env[61215]: DEBUG nova.network.neutron [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Updating instance_info_cache with network_info: [{"id": "60820ed4-2123-4e55-ba61-901b23b0b290", "address": "fa:16:3e:3a:ef:ef", "network": {"id": "3712fb99-4b30-4c13-80c4-5393a3d9d0c2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1710432517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "581f84a28d89415d9d9b12cad2fa8e10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60820ed4-21", "ovs_interfaceid": "60820ed4-2123-4e55-ba61-901b23b0b290", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2976.373959] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Releasing lock "refresh_cache-2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2976.374289] env[61215]: DEBUG nova.compute.manager [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Instance network_info: |[{"id": "60820ed4-2123-4e55-ba61-901b23b0b290", "address": "fa:16:3e:3a:ef:ef", "network": {"id": "3712fb99-4b30-4c13-80c4-5393a3d9d0c2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1710432517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "581f84a28d89415d9d9b12cad2fa8e10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60820ed4-21", "ovs_interfaceid": "60820ed4-2123-4e55-ba61-901b23b0b290", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 2976.374698] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:ef:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49b5df12-d801-4140-8816-2fd401608c7d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '60820ed4-2123-4e55-ba61-901b23b0b290', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2976.382468] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Creating folder: Project (581f84a28d89415d9d9b12cad2fa8e10). Parent ref: group-v352463. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2976.382972] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dadca311-6e3b-4851-852e-fb2591ff90aa {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2976.395398] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Created folder: Project (581f84a28d89415d9d9b12cad2fa8e10) in parent group-v352463. [ 2976.395596] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Creating folder: Instances. Parent ref: group-v352569. {{(pid=61215) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2976.395818] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f084ce31-8999-4a65-b350-3c87f246f6ce {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2976.405546] env[61215]: INFO nova.virt.vmwareapi.vm_util [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Created folder: Instances in parent group-v352569. [ 2976.405766] env[61215]: DEBUG oslo.service.loopingcall [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2976.405943] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2976.406147] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-327b9d10-f25f-469f-b70b-e64ad8662abe {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2976.425179] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2976.425179] env[61215]: value = "task-1690484" [ 2976.425179] env[61215]: _type = "Task" [ 2976.425179] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2976.432318] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690484, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2976.654693] env[61215]: DEBUG oslo_service.periodic_task [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Running periodic task ComputeManager._run_image_cache_manager_pass {{(pid=61215) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2976.654955] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "storage-registry-lock" by "nova.virt.storage_users.register_storage_use..do_register_storage_use" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2976.655606] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "storage-registry-lock" acquired by "nova.virt.storage_users.register_storage_use..do_register_storage_use" :: waited 0.001s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2976.655893] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "storage-registry-lock" "released" by "nova.virt.storage_users.register_storage_use..do_register_storage_use" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2976.656067] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "storage-registry-lock" by "nova.virt.storage_users.get_storage_users..do_get_storage_users" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2976.656348] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "storage-registry-lock" acquired by "nova.virt.storage_users.get_storage_users..do_get_storage_users" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2976.656601] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Lock "storage-registry-lock" "released" by "nova.virt.storage_users.get_storage_users..do_get_storage_users" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2976.674181] env[61215]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb86c356-dbc7-4b32-bf8a-d4107a58ca31 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2976.684057] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90daf51a-87d0-4a50-bfc8-0413ffc31614 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2976.709220] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80d3b3c0-8b8d-415c-af8d-0b5d99d8dc24 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2976.714120] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2976.714120] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52c7b1c2-2810-e59b-fd05-838e238c17ff" [ 2976.714120] env[61215]: _type = "Task" [ 2976.714120] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2976.721531] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52c7b1c2-2810-e59b-fd05-838e238c17ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2976.935430] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690484, 'name': CreateVM_Task} progress is 25%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2977.248056] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52c7b1c2-2810-e59b-fd05-838e238c17ff, 'name': SearchDatastore_Task, 'duration_secs': 0.096069} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2977.248543] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/fcb315ba-b5d3-4543-8936-af7a558046fe" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2977.248740] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/fcb315ba-b5d3-4543-8936-af7a558046fe" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2977.249117] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/fcb315ba-b5d3-4543-8936-af7a558046fe" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2977.249493] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-048f1971-9f8d-45f7-b518-4c2f5533cd10 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2977.254179] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2977.254179] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d59e18-c2a9-3346-23c6-ee85b45cb11a" [ 2977.254179] env[61215]: _type = "Task" [ 2977.254179] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2977.261483] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d59e18-c2a9-3346-23c6-ee85b45cb11a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2977.435484] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690484, 'name': CreateVM_Task} progress is 25%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2977.766510] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d59e18-c2a9-3346-23c6-ee85b45cb11a, 'name': SearchDatastore_Task, 'duration_secs': 0.010338} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2977.767410] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/fcb315ba-b5d3-4543-8936-af7a558046fe is no longer used. Deleting! 
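The wait_for_task / _poll_task exchanges above (task-1690484 and the SearchDatastore_Task sessions) all follow one pattern: poll the vCenter task's progress on an interval until it reports success, then log the total duration_secs. Below is a minimal sketch of that loop in plain Python; fake_task_progress and the polling interval are invented stand-ins for the real PropertyCollector query, and this is not oslo.vmware code.

# Illustrative sketch only -- NOT oslo.vmware code. It mimics the
# wait_for_task/_poll_task pattern in the log: poll a task's progress
# on a fixed interval until it reports success, then record the total
# duration, as in "'duration_secs': 0.096069 ... completed successfully".
import time


def fake_task_progress(started_at):
    """Hypothetical stand-in for a vCenter task query.

    Returns (state, percent). The real driver reads the Task object's
    info.state and info.progress via the PropertyCollector instead.
    """
    elapsed = time.monotonic() - started_at
    if elapsed < 0.3:
        return "running", min(int(elapsed / 0.3 * 100), 99)
    return "success", 100


def wait_for_task(task_id, poll_interval=0.1):
    started = time.monotonic()
    while True:
        state, progress = fake_task_progress(started)
        print(f"Task: {task_id!r} progress is {progress}%.")
        if state == "success":
            duration = time.monotonic() - started
            print(f"Task: {task_id!r} completed successfully "
                  f"(duration_secs: {duration:.6f})")
            return
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task("task-1690484")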
[ 2977.767575] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/fcb315ba-b5d3-4543-8936-af7a558046fe {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2977.767840] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c4ea64b-3cc6-46b7-a7d5-cd7714c01910 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2977.773627] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2977.773627] env[61215]: value = "task-1690485" [ 2977.773627] env[61215]: _type = "Task" [ 2977.773627] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2977.780875] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690485, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2977.935997] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690484, 'name': CreateVM_Task, 'duration_secs': 1.347769} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2977.936189] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2977.936865] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2977.937048] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2977.937377] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2977.937633] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b31b0feb-9ad9-42fe-8d84-a7da13e31071 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2977.942697] env[61215]: DEBUG oslo_vmware.api [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Waiting for the task: (returnval){ [ 2977.942697] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]529ebccf-cc54-96dd-c43a-2dc8a8c95d60" [ 2977.942697] env[61215]: _type = "Task" [ 2977.942697] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2977.962524] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2977.962762] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2977.962972] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2978.150807] env[61215]: DEBUG nova.compute.manager [req-039c5cb9-d699-41d7-b6f9-195ad9b58814 req-d3246140-016d-4d2e-a039-db34d7593ec0 service nova] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Received event network-changed-60820ed4-2123-4e55-ba61-901b23b0b290 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 2978.151031] env[61215]: DEBUG nova.compute.manager [req-039c5cb9-d699-41d7-b6f9-195ad9b58814 req-d3246140-016d-4d2e-a039-db34d7593ec0 service nova] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Refreshing instance network info cache due to event network-changed-60820ed4-2123-4e55-ba61-901b23b0b290. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 2978.151293] env[61215]: DEBUG oslo_concurrency.lockutils [req-039c5cb9-d699-41d7-b6f9-195ad9b58814 req-d3246140-016d-4d2e-a039-db34d7593ec0 service nova] Acquiring lock "refresh_cache-2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2978.151460] env[61215]: DEBUG oslo_concurrency.lockutils [req-039c5cb9-d699-41d7-b6f9-195ad9b58814 req-d3246140-016d-4d2e-a039-db34d7593ec0 service nova] Acquired lock "refresh_cache-2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2978.151628] env[61215]: DEBUG nova.network.neutron [req-039c5cb9-d699-41d7-b6f9-195ad9b58814 req-d3246140-016d-4d2e-a039-db34d7593ec0 service nova] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Refreshing network info cache for port 60820ed4-2123-4e55-ba61-901b23b0b290 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2978.284469] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690485, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109842} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2978.284742] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2978.284742] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/fcb315ba-b5d3-4543-8936-af7a558046fe" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2978.284894] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/38bbea46-58a2-484f-98ff-92968c526399" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2978.285010] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/38bbea46-58a2-484f-98ff-92968c526399" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2978.285326] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/38bbea46-58a2-484f-98ff-92968c526399" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2978.285618] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8218f2e2-95c9-40e6-80cd-20b8e6dacb39 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2978.289916] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2978.289916] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5233cbdf-01e7-2ddd-6c1d-4e530862fadd" [ 2978.289916] env[61215]: _type = "Task" [ 2978.289916] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2978.297623] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5233cbdf-01e7-2ddd-6c1d-4e530862fadd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2978.419835] env[61215]: DEBUG nova.network.neutron [req-039c5cb9-d699-41d7-b6f9-195ad9b58814 req-d3246140-016d-4d2e-a039-db34d7593ec0 service nova] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Updated VIF entry in instance network info cache for port 60820ed4-2123-4e55-ba61-901b23b0b290. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2978.420215] env[61215]: DEBUG nova.network.neutron [req-039c5cb9-d699-41d7-b6f9-195ad9b58814 req-d3246140-016d-4d2e-a039-db34d7593ec0 service nova] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Updating instance_info_cache with network_info: [{"id": "60820ed4-2123-4e55-ba61-901b23b0b290", "address": "fa:16:3e:3a:ef:ef", "network": {"id": "3712fb99-4b30-4c13-80c4-5393a3d9d0c2", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1710432517-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "581f84a28d89415d9d9b12cad2fa8e10", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49b5df12-d801-4140-8816-2fd401608c7d", "external-id": "nsx-vlan-transportzone-326", "segmentation_id": 326, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60820ed4-21", "ovs_interfaceid": "60820ed4-2123-4e55-ba61-901b23b0b290", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2978.429261] env[61215]: DEBUG oslo_concurrency.lockutils [req-039c5cb9-d699-41d7-b6f9-195ad9b58814 req-d3246140-016d-4d2e-a039-db34d7593ec0 service nova] Releasing lock "refresh_cache-2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2978.800181] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5233cbdf-01e7-2ddd-6c1d-4e530862fadd, 'name': SearchDatastore_Task, 'duration_secs': 0.018522} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2978.800447] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/38bbea46-58a2-484f-98ff-92968c526399 is no longer used. Deleting! [ 2978.800597] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/38bbea46-58a2-484f-98ff-92968c526399 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2978.800862] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73399b5c-63f4-4831-8815-44204c0e6058 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2978.807604] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2978.807604] env[61215]: value = "task-1690486" [ 2978.807604] env[61215]: _type = "Task" [ 2978.807604] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2978.815320] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690486, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2979.318020] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690486, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.113775} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2979.318337] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2979.318431] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/38bbea46-58a2-484f-98ff-92968c526399" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2979.318656] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/24b48019-91e2-405c-8754-f8489ab8bada" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2979.318776] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/24b48019-91e2-405c-8754-f8489ab8bada" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2979.319110] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/24b48019-91e2-405c-8754-f8489ab8bada" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2979.319510] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fde27fcd-16fe-4405-a968-2ca4defb9d1e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2979.324160] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2979.324160] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]527ac6d0-bfd9-c138-02e6-d50ea957ac82" [ 2979.324160] env[61215]: _type = "Task" [ 2979.324160] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2979.331420] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]527ac6d0-bfd9-c138-02e6-d50ea957ac82, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2979.835683] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]527ac6d0-bfd9-c138-02e6-d50ea957ac82, 'name': SearchDatastore_Task, 'duration_secs': 0.009427} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2979.835996] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/24b48019-91e2-405c-8754-f8489ab8bada is no longer used. Deleting! [ 2979.836158] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/24b48019-91e2-405c-8754-f8489ab8bada {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2979.836416] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3340d991-b64f-45d0-91c1-7a7c44cc245f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2979.842730] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2979.842730] env[61215]: value = "task-1690487" [ 2979.842730] env[61215]: _type = "Task" [ 2979.842730] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2979.850821] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690487, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2980.352046] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690487, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.112419} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2980.352335] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2980.352456] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/24b48019-91e2-405c-8754-f8489ab8bada" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2980.352675] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/28f384a9-aef1-475a-b052-bba2a8e08a4a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2980.352797] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/28f384a9-aef1-475a-b052-bba2a8e08a4a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2980.353160] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/28f384a9-aef1-475a-b052-bba2a8e08a4a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2980.353427] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ef46ff8-64f8-40b9-88a4-aded28b67bde {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2980.357547] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2980.357547] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52af14f5-738b-7716-a534-5d63a6fa4171" [ 2980.357547] env[61215]: _type = "Task" [ 2980.357547] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2980.364773] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52af14f5-738b-7716-a534-5d63a6fa4171, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2980.868066] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52af14f5-738b-7716-a534-5d63a6fa4171, 'name': SearchDatastore_Task, 'duration_secs': 0.008988} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2980.868389] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/28f384a9-aef1-475a-b052-bba2a8e08a4a is no longer used. Deleting! 
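The _run_image_cache_manager_pass sequence above repeats one step per cached base image: a SearchDatastore_Task to check whether anything still references the image under devstack-image-cache_base, then, if nothing does, an "is no longer used. Deleting!" followed by a DeleteDatastoreFile_Task. A minimal sketch of that aging pass follows, with an invented in-memory stand-in for the datastore; it is not nova.virt.vmwareapi.imagecache code.

# Illustrative sketch only -- NOT nova.virt.vmwareapi.imagecache. It mirrors
# the loop visible above: for each entry in the image cache, check whether
# any instance still references it (the SearchDatastore_Task step) and, if
# not, delete it (the DeleteDatastoreFile_Task step). The in-memory sets
# below are invented for illustration.

cached_base_images = {
    "fcb315ba-b5d3-4543-8936-af7a558046fe",
    "38bbea46-58a2-484f-98ff-92968c526399",
    "e91f0c25-9ff9-4937-8440-f47cfb2028bc",  # still used by a live instance
}
images_in_use = {"e91f0c25-9ff9-4937-8440-f47cfb2028bc"}


def run_image_cache_pass(cache, in_use, datastore="datastore1"):
    """Delete cached base images that no running instance references."""
    for image_id in sorted(cache - in_use):
        path = f"[{datastore}] devstack-image-cache_base/{image_id}"
        print(f"Image {path} is no longer used. Deleting!")
        cache.discard(image_id)  # stands in for DeleteDatastoreFile_Task


run_image_cache_pass(cached_base_images, images_in_use)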
[ 2980.868538] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/28f384a9-aef1-475a-b052-bba2a8e08a4a {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2980.868799] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31bf954b-a48c-4e18-901d-361c949a65f6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2980.875934] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2980.875934] env[61215]: value = "task-1690488" [ 2980.875934] env[61215]: _type = "Task" [ 2980.875934] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2980.884560] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690488, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2981.385895] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690488, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111454} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2981.386217] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2981.386249] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/28f384a9-aef1-475a-b052-bba2a8e08a4a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2981.386462] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/ca1ae1a3-5f58-46aa-a6e8-70ff2bc411ee" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2981.386582] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/ca1ae1a3-5f58-46aa-a6e8-70ff2bc411ee" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2981.386896] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/ca1ae1a3-5f58-46aa-a6e8-70ff2bc411ee" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2981.387170] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-271f5c64-b6df-41d4-b3f4-7f8b0d2dbec9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2981.392333] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2981.392333] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f0a45d-f16e-b870-4fef-c2d92567448a" [ 2981.392333] env[61215]: _type = "Task" [ 2981.392333] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2981.400201] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f0a45d-f16e-b870-4fef-c2d92567448a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2981.903673] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f0a45d-f16e-b870-4fef-c2d92567448a, 'name': SearchDatastore_Task, 'duration_secs': 0.011825} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2981.903995] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/ca1ae1a3-5f58-46aa-a6e8-70ff2bc411ee is no longer used. Deleting! [ 2981.904163] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/ca1ae1a3-5f58-46aa-a6e8-70ff2bc411ee {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2981.904447] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19bb7850-b10e-4239-905e-81b1274da10f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2981.910529] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2981.910529] env[61215]: value = "task-1690489" [ 2981.910529] env[61215]: _type = "Task" [ 2981.910529] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2981.917633] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690489, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2982.420901] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690489, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102119} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2982.421304] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2982.421398] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/ca1ae1a3-5f58-46aa-a6e8-70ff2bc411ee" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2982.421541] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/d3541731-1339-449f-928d-402455a00829" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2982.421659] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/d3541731-1339-449f-928d-402455a00829" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2982.421985] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/d3541731-1339-449f-928d-402455a00829" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2982.422279] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e13a23cf-33fd-499e-a3d6-a7ab790bf10c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2982.426656] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2982.426656] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52072dca-50dc-4075-9f22-df87da446a4b" [ 2982.426656] env[61215]: _type = "Task" [ 2982.426656] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2982.433948] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52072dca-50dc-4075-9f22-df87da446a4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2982.937713] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52072dca-50dc-4075-9f22-df87da446a4b, 'name': SearchDatastore_Task, 'duration_secs': 0.008923} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2982.938042] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/d3541731-1339-449f-928d-402455a00829 is no longer used. Deleting! 
[ 2982.938196] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/d3541731-1339-449f-928d-402455a00829 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2982.938450] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28ca1f73-82bf-4e52-bf1e-5f50a38962c3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2982.944069] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2982.944069] env[61215]: value = "task-1690490" [ 2982.944069] env[61215]: _type = "Task" [ 2982.944069] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2982.951506] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690490, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2983.453939] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099945} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2983.454369] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2983.454369] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/d3541731-1339-449f-928d-402455a00829" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2983.454597] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/33deed77-2408-4c4d-8820-0bdbee5ff694" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2983.454718] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/33deed77-2408-4c4d-8820-0bdbee5ff694" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2983.455042] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/33deed77-2408-4c4d-8820-0bdbee5ff694" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2983.455313] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ce1e1fe-aada-4de5-af90-037b7d926788 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2983.459573] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2983.459573] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b97c0f-21f4-9659-952a-fe0931e69374" [ 2983.459573] env[61215]: _type = "Task" [ 2983.459573] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2983.466766] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b97c0f-21f4-9659-952a-fe0931e69374, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2983.970331] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b97c0f-21f4-9659-952a-fe0931e69374, 'name': SearchDatastore_Task, 'duration_secs': 0.009801} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2983.970652] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/33deed77-2408-4c4d-8820-0bdbee5ff694 is no longer used. Deleting! [ 2983.970801] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/33deed77-2408-4c4d-8820-0bdbee5ff694 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2983.971081] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7544fa9c-9e0e-4712-a400-a6785db8ccd4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2983.976886] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2983.976886] env[61215]: value = "task-1690491" [ 2983.976886] env[61215]: _type = "Task" [ 2983.976886] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2983.985301] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690491, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2984.486351] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690491, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.119116} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2984.486605] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2984.486742] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/33deed77-2408-4c4d-8820-0bdbee5ff694" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2984.486961] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/ce746a6a-2f86-4112-b2ee-e92df63d0569" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2984.487098] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/ce746a6a-2f86-4112-b2ee-e92df63d0569" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2984.487422] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/ce746a6a-2f86-4112-b2ee-e92df63d0569" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2984.487692] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52e4371c-f1c2-456b-b301-0de806a1ab38 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2984.492014] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2984.492014] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52079db0-a3bc-5729-ed8a-2f9e87cfa7bc" [ 2984.492014] env[61215]: _type = "Task" [ 2984.492014] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2984.499164] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52079db0-a3bc-5729-ed8a-2f9e87cfa7bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2985.002234] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52079db0-a3bc-5729-ed8a-2f9e87cfa7bc, 'name': SearchDatastore_Task, 'duration_secs': 0.008758} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2985.002550] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/ce746a6a-2f86-4112-b2ee-e92df63d0569 is no longer used. Deleting! 
[ 2985.002695] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/ce746a6a-2f86-4112-b2ee-e92df63d0569 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2985.002980] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe87686c-3f8d-4951-8952-bee6c0f339f4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2985.009503] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2985.009503] env[61215]: value = "task-1690492" [ 2985.009503] env[61215]: _type = "Task" [ 2985.009503] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2985.017213] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690492, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2985.519739] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103452} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2985.520079] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2985.520079] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/ce746a6a-2f86-4112-b2ee-e92df63d0569" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2985.520303] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/b5ea50b0-9643-4a8e-8d93-3fb3732a401c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2985.520425] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/b5ea50b0-9643-4a8e-8d93-3fb3732a401c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2985.520747] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/b5ea50b0-9643-4a8e-8d93-3fb3732a401c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2985.521019] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19f0649f-ddea-4a0a-967c-74f767275b43 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2985.525438] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2985.525438] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]527b4d5b-81ab-3e10-8f78-2b6e44155f02" [ 2985.525438] env[61215]: _type = "Task" [ 2985.525438] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2985.532930] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]527b4d5b-81ab-3e10-8f78-2b6e44155f02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2986.036294] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]527b4d5b-81ab-3e10-8f78-2b6e44155f02, 'name': SearchDatastore_Task, 'duration_secs': 0.009272} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2986.036631] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/b5ea50b0-9643-4a8e-8d93-3fb3732a401c is no longer used. Deleting! [ 2986.036792] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/b5ea50b0-9643-4a8e-8d93-3fb3732a401c {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2986.037060] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9d7db32-e2e0-4784-ad4d-374440713091 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2986.042735] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2986.042735] env[61215]: value = "task-1690493" [ 2986.042735] env[61215]: _type = "Task" [ 2986.042735] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2986.054011] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690493, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2986.552324] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690493, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097211} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2986.552698] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2986.552698] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/b5ea50b0-9643-4a8e-8d93-3fb3732a401c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2986.552928] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/69058b66-2438-425c-a269-c06f4df296dd" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2986.553060] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/69058b66-2438-425c-a269-c06f4df296dd" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2986.553433] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/69058b66-2438-425c-a269-c06f4df296dd" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2986.553699] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b1b8b9d-71b8-4784-bfbe-d1b62022db09 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2986.557885] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2986.557885] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]520d09d5-290d-9840-6041-1342ce7e7bfb" [ 2986.557885] env[61215]: _type = "Task" [ 2986.557885] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2986.565261] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]520d09d5-290d-9840-6041-1342ce7e7bfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2987.068270] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]520d09d5-290d-9840-6041-1342ce7e7bfb, 'name': SearchDatastore_Task, 'duration_secs': 0.00907} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2987.068595] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/69058b66-2438-425c-a269-c06f4df296dd is no longer used. Deleting! 
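[editor's note] Each "Waiting for the task … progress is 0% … completed successfully" sequence above comes from oslo.vmware polling the vCenter task object on a fixed interval. A minimal sketch of that polling pattern, modeled on the documented oslo_service/oslo.vmware loop (`fetch_task_info` is a hypothetical stand-in for the property read against the Task object; the 0.5s interval is an assumption matching oslo.vmware's default task_poll_interval):

    # Sketch of the fixed-interval task poll behind the repeated
    # _poll_task records; LoopingCallDone ends the loop and its
    # payload becomes the return value of .wait().
    from oslo_service import loopingcall

    def wait_for_task(fetch_task_info, poll_interval=0.5):
        def _poll():
            info = fetch_task_info()           # e.g. {'state': ..., 'progress': ...}
            if info['state'] == 'running':
                print("progress is %s%%" % info.get('progress', 0))
            elif info['state'] == 'success':
                raise loopingcall.LoopingCallDone(info)
            else:
                raise RuntimeError(info.get('error', 'task failed'))

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=poll_interval).wait()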
[ 2987.068743] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/69058b66-2438-425c-a269-c06f4df296dd {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2987.069017] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e5f5294-d29b-40da-8a97-a90c934f41ff {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2987.074720] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2987.074720] env[61215]: value = "task-1690494" [ 2987.074720] env[61215]: _type = "Task" [ 2987.074720] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2987.081804] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690494, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2987.584886] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690494, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103691} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2987.585331] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2987.585331] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/69058b66-2438-425c-a269-c06f4df296dd" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2987.585493] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/51808a3c-7f56-4bd2-948e-c49cf10f16c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2987.585614] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/51808a3c-7f56-4bd2-948e-c49cf10f16c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2987.585936] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/51808a3c-7f56-4bd2-948e-c49cf10f16c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2987.586221] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b04b97b9-bbad-4faa-9744-36d09204f673 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2987.590650] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2987.590650] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52800ff5-ac19-42c2-b387-89aaa0826a44" [ 2987.590650] env[61215]: _type = "Task" [ 2987.590650] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2987.597860] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52800ff5-ac19-42c2-b387-89aaa0826a44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2988.101943] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52800ff5-ac19-42c2-b387-89aaa0826a44, 'name': SearchDatastore_Task, 'duration_secs': 0.010256} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2988.102288] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/51808a3c-7f56-4bd2-948e-c49cf10f16c1 is no longer used. Deleting! [ 2988.102439] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/51808a3c-7f56-4bd2-948e-c49cf10f16c1 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2988.102701] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3721c11c-4c62-452a-9314-c2397a8d0ab3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2988.108933] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2988.108933] env[61215]: value = "task-1690495" [ 2988.108933] env[61215]: _type = "Task" [ 2988.108933] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2988.116409] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690495, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2988.618989] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690495, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097677} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2988.619363] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2988.619399] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/51808a3c-7f56-4bd2-948e-c49cf10f16c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2988.619623] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f2beb18e-0b9b-4c89-b6b4-b81c1fa41bbf" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2988.619745] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/f2beb18e-0b9b-4c89-b6b4-b81c1fa41bbf" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2988.620091] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2beb18e-0b9b-4c89-b6b4-b81c1fa41bbf" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2988.620357] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-950a75f1-d0f5-42c8-b732-3c6c56f86f23 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2988.624523] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2988.624523] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f3f8af-470f-fbf7-ba64-3c6f723e32c6" [ 2988.624523] env[61215]: _type = "Task" [ 2988.624523] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2988.631762] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f3f8af-470f-fbf7-ba64-3c6f723e32c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2989.135866] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f3f8af-470f-fbf7-ba64-3c6f723e32c6, 'name': SearchDatastore_Task, 'duration_secs': 0.010544} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2989.136347] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/f2beb18e-0b9b-4c89-b6b4-b81c1fa41bbf is no longer used. Deleting! 
[ 2989.136594] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/f2beb18e-0b9b-4c89-b6b4-b81c1fa41bbf {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2989.136946] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-acf905d9-4dc1-4424-af83-6b84f5bf1b5a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2989.143847] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2989.143847] env[61215]: value = "task-1690496" [ 2989.143847] env[61215]: _type = "Task" [ 2989.143847] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2989.154585] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690496, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2989.654173] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690496, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097486} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2989.654552] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2989.654552] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/f2beb18e-0b9b-4c89-b6b4-b81c1fa41bbf" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2989.654786] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/b234422e-b5c3-429e-a488-fbba69e6e4d3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2989.654900] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/b234422e-b5c3-429e-a488-fbba69e6e4d3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2989.655252] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/b234422e-b5c3-429e-a488-fbba69e6e4d3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2989.655551] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fe4e7a4-0244-423b-8b1b-61228cf9c327 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2989.659960] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2989.659960] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52311ea5-102e-2508-aa7a-0b7ede02dbda" [ 2989.659960] env[61215]: _type = "Task" [ 2989.659960] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2989.667520] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52311ea5-102e-2508-aa7a-0b7ede02dbda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2990.170721] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52311ea5-102e-2508-aa7a-0b7ede02dbda, 'name': SearchDatastore_Task, 'duration_secs': 0.009334} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2990.170996] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/b234422e-b5c3-429e-a488-fbba69e6e4d3 is no longer used. Deleting! [ 2990.171161] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/b234422e-b5c3-429e-a488-fbba69e6e4d3 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2990.171422] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-521890c3-88c3-42b0-88a6-bd3b46c3e8d3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2990.178148] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2990.178148] env[61215]: value = "task-1690497" [ 2990.178148] env[61215]: _type = "Task" [ 2990.178148] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2990.185139] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690497, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2990.688770] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690497, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102978} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2990.689155] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2990.689192] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/b234422e-b5c3-429e-a488-fbba69e6e4d3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2990.689394] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/3480d565-3193-4ac7-9c14-ea6008acb4b5" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2990.689515] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/3480d565-3193-4ac7-9c14-ea6008acb4b5" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2990.689828] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/3480d565-3193-4ac7-9c14-ea6008acb4b5" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2990.690099] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b84a4b5a-242e-4e4c-b043-478abaf1ffb2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2990.694341] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2990.694341] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]529ef963-7d6a-74a6-d4e5-9c70211d72ac" [ 2990.694341] env[61215]: _type = "Task" [ 2990.694341] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2990.701470] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]529ef963-7d6a-74a6-d4e5-9c70211d72ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2991.205500] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]529ef963-7d6a-74a6-d4e5-9c70211d72ac, 'name': SearchDatastore_Task, 'duration_secs': 0.009011} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2991.205835] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/3480d565-3193-4ac7-9c14-ea6008acb4b5 is no longer used. Deleting! 
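[editor's note] The "is no longer used" verdict repeated in these records is, in essence, a set difference between the images present in the cache folder and the images still backing instances. Illustrative only; `list_cache_entries` and `images_in_use` are hypothetical helpers, not Nova functions:

    # Sketch: deriving the deletion candidates that the log then
    # processes one lock/search/delete cycle at a time.
    def unused_cache_entries(list_cache_entries, images_in_use):
        cached = set(list_cache_entries())   # ids under devstack-image-cache_base/
        used = set(images_in_use())          # ids referenced by running instances
        return sorted(cached - used)         # entries logged as "Deleting!"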
[ 2991.205989] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/3480d565-3193-4ac7-9c14-ea6008acb4b5 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2991.206298] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4abfff36-a03f-4326-ac47-dcfbd5abcb71 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2991.212440] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2991.212440] env[61215]: value = "task-1690498" [ 2991.212440] env[61215]: _type = "Task" [ 2991.212440] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2991.219689] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690498, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2991.722574] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690498, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09692} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2991.722930] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2991.723067] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/3480d565-3193-4ac7-9c14-ea6008acb4b5" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2991.723296] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/606aa004-110a-4bdd-aa5b-6db84ddce29d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2991.723420] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/606aa004-110a-4bdd-aa5b-6db84ddce29d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2991.723731] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/606aa004-110a-4bdd-aa5b-6db84ddce29d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2991.723985] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10da805a-8b15-4418-86f4-c62faba9050d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2991.728160] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2991.728160] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52a5d02c-4a63-3b76-8f61-ddb4b2de56c5" [ 2991.728160] env[61215]: _type = "Task" [ 2991.728160] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2991.735778] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52a5d02c-4a63-3b76-8f61-ddb4b2de56c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2992.243929] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52a5d02c-4a63-3b76-8f61-ddb4b2de56c5, 'name': SearchDatastore_Task, 'duration_secs': 0.00902} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2992.244438] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/606aa004-110a-4bdd-aa5b-6db84ddce29d is no longer used. Deleting! [ 2992.244694] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/606aa004-110a-4bdd-aa5b-6db84ddce29d {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2992.245115] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81f96591-c4da-45d4-8849-3d852bf040d6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2992.252671] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2992.252671] env[61215]: value = "task-1690499" [ 2992.252671] env[61215]: _type = "Task" [ 2992.252671] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2992.763273] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690499, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172346} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2992.763654] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2992.763654] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/606aa004-110a-4bdd-aa5b-6db84ddce29d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2992.763872] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e5ac9c7b-83d2-48dc-b97f-ea1f4a694f64" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2992.763998] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/e5ac9c7b-83d2-48dc-b97f-ea1f4a694f64" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2992.764395] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e5ac9c7b-83d2-48dc-b97f-ea1f4a694f64" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2992.764675] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3707f806-36ed-4c1a-82b3-491c25f70841 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2992.769192] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2992.769192] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5240bd99-afa3-2121-4a1c-a961d2b75a48" [ 2992.769192] env[61215]: _type = "Task" [ 2992.769192] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2992.776858] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5240bd99-afa3-2121-4a1c-a961d2b75a48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2993.280635] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5240bd99-afa3-2121-4a1c-a961d2b75a48, 'name': SearchDatastore_Task, 'duration_secs': 0.009097} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2993.281026] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/e5ac9c7b-83d2-48dc-b97f-ea1f4a694f64 is no longer used. Deleting! 
[ 2993.281191] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/e5ac9c7b-83d2-48dc-b97f-ea1f4a694f64 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2993.281471] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28805c9f-b737-4e69-8054-2d701a2a2089 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2993.288108] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2993.288108] env[61215]: value = "task-1690500" [ 2993.288108] env[61215]: _type = "Task" [ 2993.288108] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2993.295869] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690500, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2993.798404] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690500, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103572} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2993.798670] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2993.798758] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/e5ac9c7b-83d2-48dc-b97f-ea1f4a694f64" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2993.798964] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/1dd2b538-1ac2-44ad-88b6-4c5495036a26" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2993.799214] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/1dd2b538-1ac2-44ad-88b6-4c5495036a26" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2993.799624] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1dd2b538-1ac2-44ad-88b6-4c5495036a26" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2993.799860] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0097016-25ee-4cca-9832-4f187b20e56d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2993.804020] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2993.804020] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]525d4675-d8f8-1518-f5e0-ae4e3079ee2a" [ 2993.804020] env[61215]: _type = "Task" [ 2993.804020] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2993.811185] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525d4675-d8f8-1518-f5e0-ae4e3079ee2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2994.314613] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525d4675-d8f8-1518-f5e0-ae4e3079ee2a, 'name': SearchDatastore_Task, 'duration_secs': 0.011085} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2994.314937] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/1dd2b538-1ac2-44ad-88b6-4c5495036a26 is no longer used. Deleting! [ 2994.315130] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/1dd2b538-1ac2-44ad-88b6-4c5495036a26 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2994.315370] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1121c275-ffa8-4bda-85ae-d87826560bbe {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.321075] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2994.321075] env[61215]: value = "task-1690501" [ 2994.321075] env[61215]: _type = "Task" [ 2994.321075] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2994.328133] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690501, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2994.830723] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690501, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103289} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2994.830956] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2994.831182] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/1dd2b538-1ac2-44ad-88b6-4c5495036a26" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2994.831405] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/7075f5cb-1c25-417a-8edf-759c68066be7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2994.831531] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/7075f5cb-1c25-417a-8edf-759c68066be7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2994.832065] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/7075f5cb-1c25-417a-8edf-759c68066be7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2994.832136] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d31c833c-3862-4854-b4b1-b3741e3c3199 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2994.836386] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2994.836386] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52120d9b-b706-32f2-1c5c-ee31310aeac5" [ 2994.836386] env[61215]: _type = "Task" [ 2994.836386] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2994.844231] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52120d9b-b706-32f2-1c5c-ee31310aeac5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2995.348464] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52120d9b-b706-32f2-1c5c-ee31310aeac5, 'name': SearchDatastore_Task, 'duration_secs': 0.009063} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2995.348785] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/7075f5cb-1c25-417a-8edf-759c68066be7 is no longer used. Deleting! 
[ 2995.348935] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/7075f5cb-1c25-417a-8edf-759c68066be7 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2995.349231] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d88352c6-70cd-4992-b2ed-4f33c34b2fd3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2995.355776] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2995.355776] env[61215]: value = "task-1690502" [ 2995.355776] env[61215]: _type = "Task" [ 2995.355776] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2995.363132] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690502, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2995.865729] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690502, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1298} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2995.866074] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2995.866074] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/7075f5cb-1c25-417a-8edf-759c68066be7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2995.866755] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/578317c7-e0be-4e48-9bd4-8e9a1425cf1d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2995.866755] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/578317c7-e0be-4e48-9bd4-8e9a1425cf1d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2995.866755] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/578317c7-e0be-4e48-9bd4-8e9a1425cf1d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2995.866954] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70bd30ec-5991-4b9e-8a68-ead84bf07d3f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2995.871231] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2995.871231] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]5234acb1-22a5-8a8c-650e-c52ed4d93ac7" [ 2995.871231] env[61215]: _type = "Task" [ 2995.871231] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2995.878935] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5234acb1-22a5-8a8c-650e-c52ed4d93ac7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2996.382354] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5234acb1-22a5-8a8c-650e-c52ed4d93ac7, 'name': SearchDatastore_Task, 'duration_secs': 0.008742} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2996.382653] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/578317c7-e0be-4e48-9bd4-8e9a1425cf1d is no longer used. Deleting! [ 2996.382802] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/578317c7-e0be-4e48-9bd4-8e9a1425cf1d {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2996.383079] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f280a821-45db-46d6-bee1-36852819a200 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2996.389345] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2996.389345] env[61215]: value = "task-1690503" [ 2996.389345] env[61215]: _type = "Task" [ 2996.389345] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2996.396406] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690503, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2996.898404] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690503, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098519} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2996.898696] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2996.898848] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/578317c7-e0be-4e48-9bd4-8e9a1425cf1d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2996.899086] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/907e2f7c-5cde-4c11-8d95-de4b491efebe" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2996.899213] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/907e2f7c-5cde-4c11-8d95-de4b491efebe" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2996.899572] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/907e2f7c-5cde-4c11-8d95-de4b491efebe" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2996.899835] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b0aaeb7-663a-43e9-b533-dcd995129992 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2996.904521] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2996.904521] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52addc52-bb30-39c0-8f2f-d346b363c31d" [ 2996.904521] env[61215]: _type = "Task" [ 2996.904521] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2996.911547] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52addc52-bb30-39c0-8f2f-d346b363c31d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2997.413934] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52addc52-bb30-39c0-8f2f-d346b363c31d, 'name': SearchDatastore_Task, 'duration_secs': 0.008245} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2997.414264] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/907e2f7c-5cde-4c11-8d95-de4b491efebe is no longer used. Deleting! 
[ 2997.414427] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/907e2f7c-5cde-4c11-8d95-de4b491efebe {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2997.414677] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06650e20-3662-40e1-958a-f5d3bff657a2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2997.421092] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2997.421092] env[61215]: value = "task-1690504" [ 2997.421092] env[61215]: _type = "Task" [ 2997.421092] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2997.428201] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690504, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2997.930722] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690504, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096343} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2997.931078] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2997.931078] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/907e2f7c-5cde-4c11-8d95-de4b491efebe" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2997.931296] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/28fef0f4-6916-497c-b3fb-08aed9227e8e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2997.931420] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/28fef0f4-6916-497c-b3fb-08aed9227e8e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2997.931737] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/28fef0f4-6916-497c-b3fb-08aed9227e8e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2997.932011] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cebbe9a-a8f8-4ef9-8630-905d2b3e019d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2997.938831] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2997.938831] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]5271756c-9ac3-34e6-567e-f63d5ad9f5ca" [ 2997.938831] env[61215]: _type = "Task" [ 2997.938831] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2997.946789] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5271756c-9ac3-34e6-567e-f63d5ad9f5ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2998.450779] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5271756c-9ac3-34e6-567e-f63d5ad9f5ca, 'name': SearchDatastore_Task, 'duration_secs': 0.008977} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2998.451086] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/28fef0f4-6916-497c-b3fb-08aed9227e8e is no longer used. Deleting! [ 2998.451283] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/28fef0f4-6916-497c-b3fb-08aed9227e8e {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2998.451582] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9cdd7ba-6221-4dd3-8022-9e6727227708 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2998.458476] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2998.458476] env[61215]: value = "task-1690505" [ 2998.458476] env[61215]: _type = "Task" [ 2998.458476] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2998.466049] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690505, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2998.968689] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690505, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100755} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2998.968997] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2998.969087] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/28fef0f4-6916-497c-b3fb-08aed9227e8e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2998.969307] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/6affd69f-f611-4b18-8380-abecd0188e4c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2998.969429] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/6affd69f-f611-4b18-8380-abecd0188e4c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2998.969744] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/6affd69f-f611-4b18-8380-abecd0188e4c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2998.970010] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-977f1145-000e-41e9-ba83-74548340e574 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2998.974397] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2998.974397] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52eb6d91-5064-bdb4-a0c5-5e831c217f6f" [ 2998.974397] env[61215]: _type = "Task" [ 2998.974397] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2998.981613] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52eb6d91-5064-bdb4-a0c5-5e831c217f6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2999.485758] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52eb6d91-5064-bdb4-a0c5-5e831c217f6f, 'name': SearchDatastore_Task, 'duration_secs': 0.008779} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2999.486037] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/6affd69f-f611-4b18-8380-abecd0188e4c is no longer used. Deleting! 
[ 2999.486191] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/6affd69f-f611-4b18-8380-abecd0188e4c {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2999.486450] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-feb5bf4f-d3ff-4e5c-b70d-990c3823b119 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2999.492152] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 2999.492152] env[61215]: value = "task-1690506" [ 2999.492152] env[61215]: _type = "Task" [ 2999.492152] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2999.499346] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690506, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3000.002076] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690506, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094638} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3000.002361] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3000.002486] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/6affd69f-f611-4b18-8380-abecd0188e4c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3000.002755] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/3f1a0b47-47bb-4c19-a71f-81355e7a3566" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3000.002880] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/3f1a0b47-47bb-4c19-a71f-81355e7a3566" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3000.003207] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/3f1a0b47-47bb-4c19-a71f-81355e7a3566" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3000.003508] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c627d936-e3ea-478e-a094-33b3c9256d0c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3000.007670] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3000.007670] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]5272f302-817e-0da5-0b5f-8fb76dfde887" [ 3000.007670] env[61215]: _type = "Task" [ 3000.007670] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3000.015146] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5272f302-817e-0da5-0b5f-8fb76dfde887, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3000.519443] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5272f302-817e-0da5-0b5f-8fb76dfde887, 'name': SearchDatastore_Task, 'duration_secs': 0.009353} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3000.519748] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/3f1a0b47-47bb-4c19-a71f-81355e7a3566 is no longer used. Deleting! [ 3000.519894] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/3f1a0b47-47bb-4c19-a71f-81355e7a3566 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3000.520161] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e21c4320-c708-4f94-ba6d-5b3bf4fe95e6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3000.526507] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3000.526507] env[61215]: value = "task-1690507" [ 3000.526507] env[61215]: _type = "Task" [ 3000.526507] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3000.533681] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690507, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3001.037055] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690507, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105016} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3001.037055] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3001.037055] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/3f1a0b47-47bb-4c19-a71f-81355e7a3566" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3001.037440] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/60498980-7c63-408b-9e8f-465a0890088f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3001.037440] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/60498980-7c63-408b-9e8f-465a0890088f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3001.037666] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/60498980-7c63-408b-9e8f-465a0890088f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3001.037925] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89d27bf1-8ddd-4230-b78e-46165a084670 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3001.042111] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3001.042111] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e52f72-953c-e711-c646-df90e35faf90" [ 3001.042111] env[61215]: _type = "Task" [ 3001.042111] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3001.049210] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e52f72-953c-e711-c646-df90e35faf90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3001.552330] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e52f72-953c-e711-c646-df90e35faf90, 'name': SearchDatastore_Task, 'duration_secs': 0.00908} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3001.552633] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/60498980-7c63-408b-9e8f-465a0890088f is no longer used. Deleting! 
[ 3001.552788] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/60498980-7c63-408b-9e8f-465a0890088f {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3001.553061] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28ed322d-ef09-4f11-a8e7-e59360c53665 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3001.558819] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3001.558819] env[61215]: value = "task-1690508" [ 3001.558819] env[61215]: _type = "Task" [ 3001.558819] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3001.566250] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690508, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3002.068789] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690508, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099874} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3002.069197] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3002.069197] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/60498980-7c63-408b-9e8f-465a0890088f" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3002.069410] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3002.069533] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3002.069855] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3002.070126] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9192dd0f-1e8e-483d-94cf-9a3b0b7db461 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3002.074213] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3002.074213] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]525c83f3-2cee-e409-f7ad-03c9836483c4" [ 3002.074213] env[61215]: _type = "Task" [ 3002.074213] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3002.081418] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525c83f3-2cee-e409-f7ad-03c9836483c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3002.584940] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525c83f3-2cee-e409-f7ad-03c9836483c4, 'name': SearchDatastore_Task, 'duration_secs': 0.008966} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3002.585267] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b is no longer used. Deleting! [ 3002.585414] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3002.585675] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2fa3f47-c102-49a0-8036-5bbfc32c56ca {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3002.592047] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3002.592047] env[61215]: value = "task-1690509" [ 3002.592047] env[61215]: _type = "Task" [ 3002.592047] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3002.599302] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690509, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3003.101879] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690509, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.093752} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3003.102235] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3003.102271] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/e9d7a45b-6ee0-421f-82fb-db2ef8922c9b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3003.102482] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/ec92b9ad-c2d6-45d2-972c-3057b24506c8" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3003.102603] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/ec92b9ad-c2d6-45d2-972c-3057b24506c8" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3003.102927] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/ec92b9ad-c2d6-45d2-972c-3057b24506c8" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3003.103206] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b607b138-f536-4af6-a6e0-98afd5722be3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.107276] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3003.107276] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5213eb3b-ee9e-f3f9-1aba-eb87436e9dc7" [ 3003.107276] env[61215]: _type = "Task" [ 3003.107276] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3003.114246] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5213eb3b-ee9e-f3f9-1aba-eb87436e9dc7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3003.618847] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5213eb3b-ee9e-f3f9-1aba-eb87436e9dc7, 'name': SearchDatastore_Task, 'duration_secs': 0.008681} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3003.619809] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/ec92b9ad-c2d6-45d2-972c-3057b24506c8 is no longer used. Deleting! 
[ 3003.619809] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/ec92b9ad-c2d6-45d2-972c-3057b24506c8 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3003.619809] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-390016ae-9d77-4b1a-8d9d-b0207e5da159 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3003.625926] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3003.625926] env[61215]: value = "task-1690510" [ 3003.625926] env[61215]: _type = "Task" [ 3003.625926] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3003.633475] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690510, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3004.135830] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690510, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115442} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3004.136218] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3004.136218] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/ec92b9ad-c2d6-45d2-972c-3057b24506c8" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3004.136441] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/1a16430b-f576-459c-ac09-bbd0b1c4e7fd" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3004.136560] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/1a16430b-f576-459c-ac09-bbd0b1c4e7fd" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3004.136890] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1a16430b-f576-459c-ac09-bbd0b1c4e7fd" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3004.137174] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce53c3e2-3995-450e-ac9a-5b38ab3b0d02 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3004.141354] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3004.141354] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f6b2f3-27a3-d184-7df4-59fe75544e89" [ 3004.141354] env[61215]: _type = "Task" [ 3004.141354] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3004.148459] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f6b2f3-27a3-d184-7df4-59fe75544e89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3004.653393] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f6b2f3-27a3-d184-7df4-59fe75544e89, 'name': SearchDatastore_Task, 'duration_secs': 0.012091} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3004.653656] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/1a16430b-f576-459c-ac09-bbd0b1c4e7fd is no longer used. Deleting! [ 3004.653809] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/1a16430b-f576-459c-ac09-bbd0b1c4e7fd {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3004.654084] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dadf75b9-bd70-44fe-895e-ad7cb702d559 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3004.660164] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3004.660164] env[61215]: value = "task-1690511" [ 3004.660164] env[61215]: _type = "Task" [ 3004.660164] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3004.667364] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690511, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3005.170202] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690511, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108813} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3005.170620] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3005.170620] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/1a16430b-f576-459c-ac09-bbd0b1c4e7fd" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3005.170819] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/d55e4715-bd61-4b89-a2e5-dab1fdb9ddaa" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3005.170944] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/d55e4715-bd61-4b89-a2e5-dab1fdb9ddaa" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3005.171299] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/d55e4715-bd61-4b89-a2e5-dab1fdb9ddaa" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3005.171587] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fdc10d2-7ff0-4f06-87a8-c54d7b7f0142 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3005.175802] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3005.175802] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]526cc7e0-7608-df82-fc78-1f8e327b4c9d" [ 3005.175802] env[61215]: _type = "Task" [ 3005.175802] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3005.182940] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]526cc7e0-7608-df82-fc78-1f8e327b4c9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3005.686474] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]526cc7e0-7608-df82-fc78-1f8e327b4c9d, 'name': SearchDatastore_Task, 'duration_secs': 0.008484} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3005.686733] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/d55e4715-bd61-4b89-a2e5-dab1fdb9ddaa is no longer used. Deleting! 
[ 3005.686879] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/d55e4715-bd61-4b89-a2e5-dab1fdb9ddaa {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3005.687152] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00ce77c4-cb7d-46e9-bc4d-962e0854af95 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3005.693333] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3005.693333] env[61215]: value = "task-1690512" [ 3005.693333] env[61215]: _type = "Task" [ 3005.693333] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3005.700719] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690512, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3006.203174] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690512, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101619} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3006.203521] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3006.203595] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/d55e4715-bd61-4b89-a2e5-dab1fdb9ddaa" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3006.203846] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/d17788e5-4cc5-4405-9348-dc51ff998aa7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3006.203977] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/d17788e5-4cc5-4405-9348-dc51ff998aa7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3009.370036] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/d17788e5-4cc5-4405-9348-dc51ff998aa7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3009.370449] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50f4c025-ac79-4ad9-af84-5542c13fb055 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3009.376621] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3009.376621] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52147b81-d5fb-ef30-34bb-a7d1cf1ebe52" [ 3009.376621] env[61215]: _type = "Task" [ 3009.376621] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3009.384986] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52147b81-d5fb-ef30-34bb-a7d1cf1ebe52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3009.887526] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52147b81-d5fb-ef30-34bb-a7d1cf1ebe52, 'name': SearchDatastore_Task, 'duration_secs': 0.009632} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3009.887526] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/d17788e5-4cc5-4405-9348-dc51ff998aa7 is no longer used. Deleting! [ 3009.887740] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/d17788e5-4cc5-4405-9348-dc51ff998aa7 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3009.887886] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7ac2fad-36b9-4be8-a23e-81399487e421 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3009.894295] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3009.894295] env[61215]: value = "task-1690513" [ 3009.894295] env[61215]: _type = "Task" [ 3009.894295] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3009.901238] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690513, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3010.404977] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690513, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1282} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3010.405349] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3010.405398] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/d17788e5-4cc5-4405-9348-dc51ff998aa7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3010.405592] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/dc35a7ec-88d2-47f5-b3e2-4ade76c80a19" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3010.405712] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/dc35a7ec-88d2-47f5-b3e2-4ade76c80a19" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3010.406046] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/dc35a7ec-88d2-47f5-b3e2-4ade76c80a19" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3010.406311] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13184faf-44cf-49d7-9332-da4aeb9737bd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3010.410369] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3010.410369] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52db533e-1b05-a42d-feb6-3f93dae4d06e" [ 3010.410369] env[61215]: _type = "Task" [ 3010.410369] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3010.417429] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52db533e-1b05-a42d-feb6-3f93dae4d06e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3010.920881] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52db533e-1b05-a42d-feb6-3f93dae4d06e, 'name': SearchDatastore_Task, 'duration_secs': 0.010229} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3010.921213] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/dc35a7ec-88d2-47f5-b3e2-4ade76c80a19 is no longer used. Deleting! 
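Each cleanup cycle above follows the same shape: the image-cache manager takes a per-image lock named after the datastore path, confirms via HostDatastoreBrowser.SearchDatastore_Task that the cached folder is no longer referenced, then issues FileManager.DeleteDatastoreFile_Task and waits on the returned task. A minimal sketch of the delete step only, assuming oslo.concurrency is available and an oslo.vmware VMwareAPISession is already established (the search/"still in use" check, error handling, and datacenter lookup are omitted; the helper name is ours):

    from oslo_concurrency import lockutils

    def delete_unused_cache_entry(session, ds_path, datacenter_ref):
        # ds_path mirrors the lock names in the log, e.g.
        # "[datastore1] devstack-image-cache_base/<image-id>".
        # external=True pairs a process-local lock with a file-based
        # semaphore, matching the "Acquired lock" / "Acquired external
        # semaphore" pairs above.
        with lockutils.lock(ds_path, external=True):
            file_manager = session.vim.service_content.fileManager
            task = session.invoke_api(
                session.vim, 'DeleteDatastoreFile_Task', file_manager,
                name=ds_path, datacenter=datacenter_ref)
            session.wait_for_task(task)  # blocks until vCenter reports success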
[ 3010.921360] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/dc35a7ec-88d2-47f5-b3e2-4ade76c80a19 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3010.921615] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de122c47-6d01-4ee3-85b5-23d5f1464bea {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3010.927521] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3010.927521] env[61215]: value = "task-1690514" [ 3010.927521] env[61215]: _type = "Task" [ 3010.927521] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3010.934460] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690514, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3011.437753] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690514, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1259} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3011.438101] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3011.438101] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/dc35a7ec-88d2-47f5-b3e2-4ade76c80a19" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3011.438317] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/47af2968-6d6e-4628-8b02-4da7842601d7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3011.438435] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/47af2968-6d6e-4628-8b02-4da7842601d7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3011.438757] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/47af2968-6d6e-4628-8b02-4da7842601d7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3011.439032] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed680af3-dd40-405f-8885-00cb45d15999 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3011.443485] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3011.443485] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]5234bd75-d707-bf57-a5e7-68e722acf1e8" [ 3011.443485] env[61215]: _type = "Task" [ 3011.443485] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3011.451326] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5234bd75-d707-bf57-a5e7-68e722acf1e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3011.955187] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5234bd75-d707-bf57-a5e7-68e722acf1e8, 'name': SearchDatastore_Task, 'duration_secs': 0.009015} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3011.955465] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/47af2968-6d6e-4628-8b02-4da7842601d7 is no longer used. Deleting! [ 3011.955615] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/47af2968-6d6e-4628-8b02-4da7842601d7 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3011.955929] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1621958b-4343-47e9-8abd-2bef4de4acd2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3011.962360] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3011.962360] env[61215]: value = "task-1690515" [ 3011.962360] env[61215]: _type = "Task" [ 3011.962360] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3011.969768] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690515, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3012.472266] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690515, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13089} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3012.472652] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3012.472652] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/47af2968-6d6e-4628-8b02-4da7842601d7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3012.472864] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/59db50ba-652d-47ec-87e4-58b1bc08d0b2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3012.472987] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/59db50ba-652d-47ec-87e4-58b1bc08d0b2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3012.473325] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/59db50ba-652d-47ec-87e4-58b1bc08d0b2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3012.473603] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52df707a-a666-4319-a7e6-f2588bf07737 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3012.477710] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3012.477710] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]528b1609-4195-af98-702d-7d6ee5710555" [ 3012.477710] env[61215]: _type = "Task" [ 3012.477710] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3012.485965] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]528b1609-4195-af98-702d-7d6ee5710555, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3012.988811] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]528b1609-4195-af98-702d-7d6ee5710555, 'name': SearchDatastore_Task, 'duration_secs': 0.009672} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3012.989091] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/59db50ba-652d-47ec-87e4-58b1bc08d0b2 is no longer used. Deleting! 
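Which folders get this treatment is decided before each cycle: the cache manager enumerates devstack-image-cache_base and marks any image id that no resident instance still references ("Image ... is no longer used. Deleting!"). The decision itself is a set difference; a sketch under the assumption that both id lists are already in hand:

    def unused_cache_entries(cached_ids, active_image_ids):
        # Anything cached on the datastore that no instance references is a
        # deletion candidate, as with the ids walked through above.
        return sorted(set(cached_ids) - set(active_image_ids))

    # Example: the entry still referenced by an instance is kept.
    print(unused_cache_entries(
        ['47af2968', '59db50ba'], ['59db50ba']))  # -> ['47af2968']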
[ 3012.989244] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/59db50ba-652d-47ec-87e4-58b1bc08d0b2 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3012.989508] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e7c6ba0-624b-432a-ab0e-cfd89f1d6bb7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3012.995400] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3012.995400] env[61215]: value = "task-1690516" [ 3012.995400] env[61215]: _type = "Task" [ 3012.995400] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3013.002737] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690516, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3013.505611] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690516, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3014.006046] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690516, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3014.506715] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690516, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.223272} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3014.507088] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3014.507157] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/59db50ba-652d-47ec-87e4-58b1bc08d0b2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3014.507356] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/dc3fa2cf-c06c-42f6-8074-f2abf6076021" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3014.507502] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/dc3fa2cf-c06c-42f6-8074-f2abf6076021" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3014.507825] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/dc3fa2cf-c06c-42f6-8074-f2abf6076021" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3014.508191] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17df8dd9-796d-4c37-a984-229ed9aed502 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3014.512590] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3014.512590] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]523c50ae-ec09-8d7d-90f3-60ddabf98e22" [ 3014.512590] env[61215]: _type = "Task" [ 3014.512590] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3014.520066] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]523c50ae-ec09-8d7d-90f3-60ddabf98e22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3015.023782] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]523c50ae-ec09-8d7d-90f3-60ddabf98e22, 'name': SearchDatastore_Task, 'duration_secs': 0.008838} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3015.024110] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/dc3fa2cf-c06c-42f6-8074-f2abf6076021 is no longer used. Deleting! 
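The repeated "progress is 0%" lines followed by "completed successfully" come from oslo.vmware's task poller, which re-reads the task info on a fixed interval until the task reaches a terminal state; task-1690516 just above shows the cadence clearly, with three 0% polls roughly 0.5 s apart before the delete completes after 1.22 s. A stripped-down illustration of that loop (the real implementation runs inside an eventlet loopingcall and translates VMware faults; the interval and field names here are assumptions):

    import time

    def wait_for_task(get_task_info, interval=0.5):
        # Poll a vCenter task until it succeeds or fails, reporting
        # progress on each pass, as in the DEBUG lines above.
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            print("Task %s progress is %s%%"
                  % (info['id'], info.get('progress', 0)))
            time.sleep(interval)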
[ 3015.025028] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/dc3fa2cf-c06c-42f6-8074-f2abf6076021 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3015.025028] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-45059cf4-6a41-4d2f-a030-dab1fe2de660 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3015.030719] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3015.030719] env[61215]: value = "task-1690517" [ 3015.030719] env[61215]: _type = "Task" [ 3015.030719] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3015.037948] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690517, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3015.540587] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690517, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10228} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3015.540958] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3015.540995] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/dc3fa2cf-c06c-42f6-8074-f2abf6076021" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3015.541212] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/2c08bad3-6428-4ba6-82fb-48cca2e6b5fb" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3015.541332] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/2c08bad3-6428-4ba6-82fb-48cca2e6b5fb" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3015.541684] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/2c08bad3-6428-4ba6-82fb-48cca2e6b5fb" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3015.541948] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7832d96-2a3e-4c12-9bd4-33acbe861f60 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3015.546153] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3015.546153] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d4f859-6c14-f34e-7bd9-c6fdbb9ca20d" [ 3015.546153] env[61215]: _type = "Task" [ 3015.546153] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3015.553152] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d4f859-6c14-f34e-7bd9-c6fdbb9ca20d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3016.057946] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d4f859-6c14-f34e-7bd9-c6fdbb9ca20d, 'name': SearchDatastore_Task, 'duration_secs': 0.00917} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3016.057946] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/2c08bad3-6428-4ba6-82fb-48cca2e6b5fb is no longer used. Deleting! [ 3016.057946] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/2c08bad3-6428-4ba6-82fb-48cca2e6b5fb {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3016.057946] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7c26508-5d96-4f34-b27e-cd3aa48d0537 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3016.063945] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3016.063945] env[61215]: value = "task-1690518" [ 3016.063945] env[61215]: _type = "Task" [ 3016.063945] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3016.072181] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690518, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3016.622334] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690518, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115199} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3016.622334] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3016.622334] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/2c08bad3-6428-4ba6-82fb-48cca2e6b5fb" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3016.622334] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e1562371-25c6-4e77-a0d1-ca454820a5da" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3016.622334] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/e1562371-25c6-4e77-a0d1-ca454820a5da" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3016.622334] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e1562371-25c6-4e77-a0d1-ca454820a5da" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3016.622334] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fec6800-b680-47e6-a3d6-b3ef1ea344c9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3016.622334] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3016.622334] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ccac59-97b2-5bb1-cc5d-71a53a289f60" [ 3016.622334] env[61215]: _type = "Task" [ 3016.622334] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3016.622334] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ccac59-97b2-5bb1-cc5d-71a53a289f60, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3016.622334] env[61215]: WARNING oslo_vmware.rw_handles [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 3016.622334] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 3016.622334] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 3016.622334] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 3016.622334] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 3016.622334] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 3016.622334] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 3016.622334] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 3016.622334] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 3016.622334] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 3016.622334] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 3016.622334] env[61215]: ERROR oslo_vmware.rw_handles [ 3016.622334] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/403b43ee-6690-40d8-9e85-d6fd5f4f3850/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 3016.622334] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 3016.622334] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Copying Virtual Disk [datastore1] vmware_temp/403b43ee-6690-40d8-9e85-d6fd5f4f3850/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/403b43ee-6690-40d8-9e85-d6fd5f4f3850/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 3016.623726] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-012ce112-e480-4a17-8024-8142889fae45 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3016.623726] env[61215]: DEBUG oslo_vmware.api [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for the task: (returnval){ [ 3016.623726] env[61215]: value = 
"task-1690519" [ 3016.623726] env[61215]: _type = "Task" [ 3016.623726] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3016.623726] env[61215]: DEBUG oslo_vmware.api [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Task: {'id': task-1690519, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3017.091578] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ccac59-97b2-5bb1-cc5d-71a53a289f60, 'name': SearchDatastore_Task, 'duration_secs': 0.009333} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3017.091999] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/e1562371-25c6-4e77-a0d1-ca454820a5da is no longer used. Deleting! [ 3017.092257] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/e1562371-25c6-4e77-a0d1-ca454820a5da {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3017.092583] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b3f0dbb-b8d6-4a58-afe7-3460df81609e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.098363] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3017.098363] env[61215]: value = "task-1690520" [ 3017.098363] env[61215]: _type = "Task" [ 3017.098363] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3017.106057] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690520, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3017.113325] env[61215]: DEBUG oslo_vmware.exceptions [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Fault InvalidArgument not matched. 
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 3017.113614] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3017.114183] env[61215]: ERROR nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3017.114183] env[61215]: Faults: ['InvalidArgument'] [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Traceback (most recent call last): [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] yield resources [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] self.driver.spawn(context, instance, image_meta, [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] self._fetch_image_if_missing(context, vi) [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] image_cache(vi, tmp_image_ds_loc) [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] vm_util.copy_virtual_disk( [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] session._wait_for_task(vmdk_copy_task) [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] return self.wait_for_task(task_ref) [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] return evt.wait() [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] result = hub.switch() [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] return self.greenlet.switch() [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] self.f(*self.args, **self.kw) [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] raise exceptions.translate_fault(task_info.error) [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Faults: ['InvalidArgument'] [ 3017.114183] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] [ 3017.115049] env[61215]: INFO nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Terminating instance [ 3017.116114] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3017.116358] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3017.116615] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ecaa0b7-b33e-49cf-8877-8cd8dc616947 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.118956] env[61215]: DEBUG nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 3017.119217] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3017.119993] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4539fa70-36ca-4a32-9129-3461e9cba247 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.126798] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 3017.127680] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89e26d61-6efc-488d-a6ef-f57750eeb32e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.128975] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3017.129169] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 3017.129814] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-635654b6-5518-42c3-a68d-81ae82b87d7b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.134983] env[61215]: DEBUG oslo_vmware.api [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 3017.134983] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5235d8fc-8441-5146-96da-3fb7cf785527" [ 3017.134983] env[61215]: _type = "Task" [ 3017.134983] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3017.143978] env[61215]: DEBUG oslo_vmware.api [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5235d8fc-8441-5146-96da-3fb7cf785527, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3017.215929] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 3017.216196] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 3017.216353] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Deleting the datastore file [datastore1] 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3017.216631] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e238c11-8e59-4e0e-b03c-b153dd430dec {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.224304] env[61215]: DEBUG oslo_vmware.api [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for the task: (returnval){ [ 3017.224304] env[61215]: value = "task-1690522" [ 3017.224304] env[61215]: _type = "Task" [ 3017.224304] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3017.231724] env[61215]: DEBUG oslo_vmware.api [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Task: {'id': task-1690522, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3017.608262] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690520, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122574} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3017.608262] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3017.608552] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/e1562371-25c6-4e77-a0d1-ca454820a5da" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3017.608620] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3017.608764] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3017.609095] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3017.609363] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c7ccf84-db58-44cc-a57d-672b2cb855af {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.613982] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3017.613982] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5266b7ae-f0cb-75ea-aa4f-b0fa07f47742" [ 3017.613982] env[61215]: _type = "Task" [ 3017.613982] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3017.620931] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5266b7ae-f0cb-75ea-aa4f-b0fa07f47742, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3017.643979] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 3017.644289] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating directory with path [datastore1] vmware_temp/50a2df0b-9d50-488b-9f38-028b60c7b8ad/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3017.644468] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e715baea-7f09-4ef3-9fca-b14c88c9dec3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.655133] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Created directory with path [datastore1] vmware_temp/50a2df0b-9d50-488b-9f38-028b60c7b8ad/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3017.655562] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Fetch image to [datastore1] vmware_temp/50a2df0b-9d50-488b-9f38-028b60c7b8ad/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 3017.655562] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/50a2df0b-9d50-488b-9f38-028b60c7b8ad/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 3017.656300] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395e19d4-773b-4fee-90f1-0927c752c2ad {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.662728] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8fd758-8ad7-4dbf-93c4-9f1fedc3793a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.671516] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40399567-d568-4ddc-96a8-8d8676cb642d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.702085] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc37b764-f9bd-4737-888c-cb803e575bda 
{{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.707744] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7a8b2e51-db3b-4f6c-8d7a-018f8147962f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.729414] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 3017.736379] env[61215]: DEBUG oslo_vmware.api [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Task: {'id': task-1690522, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068004} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3017.736630] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3017.736817] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 3017.736993] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3017.738044] env[61215]: INFO nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Took 0.62 seconds to destroy the instance on the hypervisor. 
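From here the log switches to the failure path: the CopyVirtualDisk_Task spawned for instance 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6 came back with VimFaultException ("A specified parameter was not correct: fileType"), so the compute manager destroyed the half-built VM (UnregisterVM, then deleting its datastore contents) and now aborts the resource claim, which is the "compute_resources" lock and placement-inventory activity just below. Schematically, with illustrative names and simplified signatures (the real methods in nova/compute/manager.py take more arguments):

    from oslo_vmware import exceptions as vexc

    def build_and_run(driver, claim, context, instance, image_meta):
        # Mirror of the sequence in the log: spawn fails on a VMware
        # fault, the instance is torn down on the hypervisor, and the
        # claim is aborted so CPU/RAM/disk return to placement.
        try:
            driver.spawn(context, instance, image_meta)
        except vexc.VimFaultException:
            driver.destroy(context, instance)  # unregister + file delete
            claim.abort()                      # releases "compute_resources"
            raise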
[ 3017.739407] env[61215]: DEBUG nova.compute.claims [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 3017.739589] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3017.739803] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3017.782756] env[61215]: DEBUG oslo_vmware.rw_handles [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/50a2df0b-9d50-488b-9f38-028b60c7b8ad/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 3017.840736] env[61215]: DEBUG nova.scheduler.client.report [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Refreshing inventories for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 3017.844822] env[61215]: DEBUG oslo_vmware.rw_handles [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 3017.845029] env[61215]: DEBUG oslo_vmware.rw_handles [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/50a2df0b-9d50-488b-9f38-028b60c7b8ad/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 3017.860967] env[61215]: DEBUG nova.scheduler.client.report [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Updating ProviderTree inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 3017.861217] env[61215]: DEBUG nova.compute.provider_tree [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 173, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3017.873040] env[61215]: DEBUG nova.scheduler.client.report [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Refreshing aggregate associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, aggregates: None {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 3017.889861] env[61215]: DEBUG nova.scheduler.client.report [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Refreshing trait associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 3017.939734] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c61f02-85fb-45e3-8810-f17d6d98e438 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.946997] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256b4d13-4500-4ae9-aa3c-deecba7310ff {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.976989] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7790fd81-9576-4e34-b22c-42c228054b5b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.983734] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41162701-5ddc-40ef-9015-689b82d7a239 {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3017.997473] env[61215]: DEBUG nova.compute.provider_tree [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3018.031788] env[61215]: DEBUG nova.scheduler.client.report [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Updated inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with generation 100 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 3018.032045] env[61215]: DEBUG nova.compute.provider_tree [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Updating resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 generation from 100 to 101 during operation: update_inventory {{(pid=61215) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 3018.032231] env[61215]: DEBUG nova.compute.provider_tree [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3018.053689] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.314s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3018.054241] env[61215]: ERROR nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3018.054241] env[61215]: Faults: 
['InvalidArgument'] [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Traceback (most recent call last): [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] self.driver.spawn(context, instance, image_meta, [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] self._fetch_image_if_missing(context, vi) [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] image_cache(vi, tmp_image_ds_loc) [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] vm_util.copy_virtual_disk( [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] session._wait_for_task(vmdk_copy_task) [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] return self.wait_for_task(task_ref) [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] return evt.wait() [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] result = hub.switch() [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] return self.greenlet.switch() [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 
8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] self.f(*self.args, **self.kw) [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] raise exceptions.translate_fault(task_info.error) [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Faults: ['InvalidArgument'] [ 3018.054241] env[61215]: ERROR nova.compute.manager [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] [ 3018.054945] env[61215]: DEBUG nova.compute.utils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 3018.056410] env[61215]: DEBUG nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Build of instance 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6 was re-scheduled: A specified parameter was not correct: fileType [ 3018.056410] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 3018.056782] env[61215]: DEBUG nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 3018.056961] env[61215]: DEBUG nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 3018.057148] env[61215]: DEBUG nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 3018.057317] env[61215]: DEBUG nova.network.neutron [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3018.124475] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5266b7ae-f0cb-75ea-aa4f-b0fa07f47742, 'name': SearchDatastore_Task, 'duration_secs': 0.014904} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3018.124769] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293 is no longer used. Deleting! [ 3018.124922] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3018.125191] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06adf2d7-f790-4219-aac1-d28af216e15d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3018.131425] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3018.131425] env[61215]: value = "task-1690523" [ 3018.131425] env[61215]: _type = "Task" [ 3018.131425] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3018.138627] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690523, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3018.324106] env[61215]: DEBUG nova.network.neutron [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3018.335191] env[61215]: INFO nova.compute.manager [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Took 0.28 seconds to deallocate network for instance. 
[ 3018.427757] env[61215]: INFO nova.scheduler.client.report [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Deleted allocations for instance 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6 [ 3018.452200] env[61215]: DEBUG oslo_concurrency.lockutils [None req-4996a236-39f2-4ace-9486-78d9c9a19202 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "8ca1c8ea-ce6a-4945-873b-aa0e6489baf6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 297.412s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3018.452476] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2237b6aa-b22f-4233-b44c-4d5f2e96f289 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "8ca1c8ea-ce6a-4945-873b-aa0e6489baf6" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 101.967s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3018.452701] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2237b6aa-b22f-4233-b44c-4d5f2e96f289 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "8ca1c8ea-ce6a-4945-873b-aa0e6489baf6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3018.452914] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2237b6aa-b22f-4233-b44c-4d5f2e96f289 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "8ca1c8ea-ce6a-4945-873b-aa0e6489baf6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3018.453104] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2237b6aa-b22f-4233-b44c-4d5f2e96f289 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "8ca1c8ea-ce6a-4945-873b-aa0e6489baf6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3018.455149] env[61215]: INFO nova.compute.manager [None req-2237b6aa-b22f-4233-b44c-4d5f2e96f289 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Terminating instance [ 3018.457278] env[61215]: DEBUG nova.compute.manager [None req-2237b6aa-b22f-4233-b44c-4d5f2e96f289 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Start destroying the instance on the hypervisor. 
{{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 3018.457497] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2237b6aa-b22f-4233-b44c-4d5f2e96f289 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3018.457994] env[61215]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36de46a1-44a1-48c2-88d7-8fac3cc11dc4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3018.467199] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901d3927-1daa-48f7-af54-3d28b8a0fddd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3018.491063] env[61215]: WARNING nova.virt.vmwareapi.vmops [None req-2237b6aa-b22f-4233-b44c-4d5f2e96f289 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6 could not be found. [ 3018.491275] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-2237b6aa-b22f-4233-b44c-4d5f2e96f289 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3018.491462] env[61215]: INFO nova.compute.manager [None req-2237b6aa-b22f-4233-b44c-4d5f2e96f289 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Took 0.03 seconds to destroy the instance on the hypervisor. [ 3018.491702] env[61215]: DEBUG oslo.service.loopingcall [None req-2237b6aa-b22f-4233-b44c-4d5f2e96f289 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3018.491920] env[61215]: DEBUG nova.compute.manager [-] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 3018.492028] env[61215]: DEBUG nova.network.neutron [-] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3018.519794] env[61215]: DEBUG nova.network.neutron [-] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3018.527926] env[61215]: INFO nova.compute.manager [-] [instance: 8ca1c8ea-ce6a-4945-873b-aa0e6489baf6] Took 0.04 seconds to deallocate network for instance. 
[ 3018.609128] env[61215]: DEBUG oslo_concurrency.lockutils [None req-2237b6aa-b22f-4233-b44c-4d5f2e96f289 tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "8ca1c8ea-ce6a-4945-873b-aa0e6489baf6" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.156s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3018.642903] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690523, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109444} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3018.643094] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3018.643300] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/e59c6db1-2b82-4035-a564-459cc2761293" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3018.643551] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/967d3475-8e02-444c-9f62-5e7bee87fb8a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3018.643677] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/967d3475-8e02-444c-9f62-5e7bee87fb8a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3018.643996] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/967d3475-8e02-444c-9f62-5e7bee87fb8a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3018.644277] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b8f11c7-6c16-4e05-938d-6131c73e7f2b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3018.648759] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3018.648759] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]527cfc1a-819b-ae12-8b70-06bc5cea505c" [ 3018.648759] env[61215]: _type = "Task" [ 3018.648759] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3018.656255] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]527cfc1a-819b-ae12-8b70-06bc5cea505c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3019.160165] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]527cfc1a-819b-ae12-8b70-06bc5cea505c, 'name': SearchDatastore_Task, 'duration_secs': 0.009651} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3019.160547] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/967d3475-8e02-444c-9f62-5e7bee87fb8a is no longer used. Deleting! [ 3019.160782] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/967d3475-8e02-444c-9f62-5e7bee87fb8a {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3019.161092] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ffe00577-f3ce-4e8e-af3e-882bfc183682 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.166948] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3019.166948] env[61215]: value = "task-1690524" [ 3019.166948] env[61215]: _type = "Task" [ 3019.166948] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3019.174790] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690524, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3019.542116] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3019.542404] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3019.553673] env[61215]: DEBUG nova.compute.manager [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 3019.601518] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3019.601765] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3019.603300] env[61215]: INFO nova.compute.claims [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 3019.676915] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690524, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120449} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3019.679162] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3019.679340] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/967d3475-8e02-444c-9f62-5e7bee87fb8a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3019.679562] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3019.679682] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3019.679993] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3019.680581] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30cea4a3-5103-460c-a8bd-41eee99a7f73 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.684787] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3019.684787] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]524017fc-4af2-72ec-912b-fc7dc5fe9901" [ 3019.684787] env[61215]: _type = "Task" [ 3019.684787] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3019.693527] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]524017fc-4af2-72ec-912b-fc7dc5fe9901, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3019.702634] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d552b8-c826-439f-a5fb-dbb0845a2a14 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.708891] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95694f8d-0928-4ce3-9fa3-8f4039e69f4b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.739276] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d19b0c-bbc7-46fc-b626-4c3a2f1ab450 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.745871] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab0c0c4-8be4-4f5e-8886-8e7d51285113 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.758489] env[61215]: DEBUG nova.compute.provider_tree [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3019.784858] env[61215]: ERROR nova.scheduler.client.report [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [req-69e3a3bc-b7c8-483c-822d-dbb8e9853aab] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 1329e087-aa78-44a2-9687-63a2b1b33fd5. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-69e3a3bc-b7c8-483c-822d-dbb8e9853aab"}]} [ 3019.799164] env[61215]: DEBUG nova.scheduler.client.report [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Refreshing inventories for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 3019.810673] env[61215]: DEBUG nova.scheduler.client.report [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Updating ProviderTree inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 3019.810879] env[61215]: DEBUG nova.compute.provider_tree [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3019.820477] env[61215]: DEBUG nova.scheduler.client.report [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Refreshing aggregate associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, aggregates: None {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 3019.835406] env[61215]: DEBUG nova.scheduler.client.report [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Refreshing trait associations for resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=61215) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 3019.879419] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4339a501-2585-4bb1-b517-1caff9041da3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.887097] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5fb079-6d74-4ca1-8d9e-d19b9d259ebe {{(pid=61215) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.918957] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a2da3a-379f-4fff-a123-84a0a2fba759 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.925864] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df32deab-387a-454b-8d33-f022b21323ce {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3019.938717] env[61215]: DEBUG nova.compute.provider_tree [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3019.976949] env[61215]: DEBUG nova.scheduler.client.report [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Updated inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with generation 102 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 3019.977246] env[61215]: DEBUG nova.compute.provider_tree [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Updating resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 generation from 102 to 103 during operation: update_inventory {{(pid=61215) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 3019.977431] env[61215]: DEBUG nova.compute.provider_tree [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3019.991130] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.389s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3019.991629] env[61215]: DEBUG nova.compute.manager [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 3020.023431] env[61215]: DEBUG nova.compute.utils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 3020.024664] env[61215]: DEBUG nova.compute.manager [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 3020.024839] env[61215]: DEBUG nova.network.neutron [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 3020.032738] env[61215]: DEBUG nova.compute.manager [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 3020.088017] env[61215]: DEBUG nova.policy [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6578389615ad46528d49d98bf36b459a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd16229c82ee9494f9921831a13c6bf7e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 3020.092727] env[61215]: DEBUG nova.compute.manager [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 3020.117236] env[61215]: DEBUG nova.virt.hardware [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=<?>,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-01T03:04:46Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 3020.117489] env[61215]: DEBUG nova.virt.hardware [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 3020.117681] env[61215]: DEBUG nova.virt.hardware [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 3020.117880] env[61215]: DEBUG nova.virt.hardware [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 3020.118045] env[61215]: DEBUG nova.virt.hardware [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 3020.118202] env[61215]: DEBUG nova.virt.hardware [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 3020.118407] env[61215]: DEBUG nova.virt.hardware [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 3020.118565] env[61215]: DEBUG nova.virt.hardware [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 3020.118777] env[61215]: DEBUG nova.virt.hardware [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 
tempest-ServersTestJSON-1355600418-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 3020.118981] env[61215]: DEBUG nova.virt.hardware [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 3020.119180] env[61215]: DEBUG nova.virt.hardware [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 3020.120042] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c31bb70-5adc-4bf0-9810-26c96d68a4ec {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3020.128027] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa6f9cc-04f8-4ae7-b92d-a8784cc7b272 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3020.195505] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]524017fc-4af2-72ec-912b-fc7dc5fe9901, 'name': SearchDatastore_Task, 'duration_secs': 0.009267} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3020.195795] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95 is no longer used. Deleting! [ 3020.195947] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3020.196221] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-516ae7cc-a4a9-4614-881e-bda89f7a5677 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3020.202703] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3020.202703] env[61215]: value = "task-1690525" [ 3020.202703] env[61215]: _type = "Task" [ 3020.202703] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3020.209918] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690525, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3020.383221] env[61215]: DEBUG nova.network.neutron [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Successfully created port: 7c2c0a8a-bbee-4e54-90a5-344bebd3b451 {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 3020.713169] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094812} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3020.713558] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3020.713558] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/4e5561a3-16c4-4011-9364-3201c12a7f95" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3020.713770] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e16fd165-9d75-4a0c-8f83-c04377df3a79" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3020.713894] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/e16fd165-9d75-4a0c-8f83-c04377df3a79" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3020.714249] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e16fd165-9d75-4a0c-8f83-c04377df3a79" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3020.714524] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b9c1a37-6b83-47b4-88e7-0da8bda3b280 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3020.719062] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3020.719062] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5270f585-8b12-7e2c-3491-539ea0388674" [ 3020.719062] env[61215]: _type = "Task" [ 3020.719062] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3020.727209] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5270f585-8b12-7e2c-3491-539ea0388674, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3021.109133] env[61215]: DEBUG nova.compute.manager [req-ba6ceb19-5b45-43ff-9dfe-1a468d88ef8b req-dc7127f4-24b9-461f-a34d-b887082c309a service nova] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Received event network-vif-plugged-7c2c0a8a-bbee-4e54-90a5-344bebd3b451 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 3021.109366] env[61215]: DEBUG oslo_concurrency.lockutils [req-ba6ceb19-5b45-43ff-9dfe-1a468d88ef8b req-dc7127f4-24b9-461f-a34d-b887082c309a service nova] Acquiring lock "6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3021.109576] env[61215]: DEBUG oslo_concurrency.lockutils [req-ba6ceb19-5b45-43ff-9dfe-1a468d88ef8b req-dc7127f4-24b9-461f-a34d-b887082c309a service nova] Lock "6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3021.109747] env[61215]: DEBUG oslo_concurrency.lockutils [req-ba6ceb19-5b45-43ff-9dfe-1a468d88ef8b req-dc7127f4-24b9-461f-a34d-b887082c309a service nova] Lock "6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3021.109917] env[61215]: DEBUG nova.compute.manager [req-ba6ceb19-5b45-43ff-9dfe-1a468d88ef8b req-dc7127f4-24b9-461f-a34d-b887082c309a service nova] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] No waiting events found dispatching network-vif-plugged-7c2c0a8a-bbee-4e54-90a5-344bebd3b451 {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 3021.110099] env[61215]: WARNING nova.compute.manager [req-ba6ceb19-5b45-43ff-9dfe-1a468d88ef8b req-dc7127f4-24b9-461f-a34d-b887082c309a service nova] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Received unexpected event network-vif-plugged-7c2c0a8a-bbee-4e54-90a5-344bebd3b451 for instance with vm_state building and task_state spawning. 
[ 3021.198050] env[61215]: DEBUG nova.network.neutron [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Successfully updated port: 7c2c0a8a-bbee-4e54-90a5-344bebd3b451 {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 3021.209691] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "refresh_cache-6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3021.209841] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquired lock "refresh_cache-6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3021.209993] env[61215]: DEBUG nova.network.neutron [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 3021.229931] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5270f585-8b12-7e2c-3491-539ea0388674, 'name': SearchDatastore_Task, 'duration_secs': 0.008921} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3021.229931] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/e16fd165-9d75-4a0c-8f83-c04377df3a79 is no longer used. Deleting! [ 3021.230081] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/e16fd165-9d75-4a0c-8f83-c04377df3a79 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3021.230319] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-135b42f2-b221-4d38-83b4-0cbe0e1e171e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3021.236864] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3021.236864] env[61215]: value = "task-1690526" [ 3021.236864] env[61215]: _type = "Task" [ 3021.236864] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3021.246045] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690526, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3021.254240] env[61215]: DEBUG nova.network.neutron [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Instance cache missing network info. {{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 3021.422144] env[61215]: DEBUG nova.network.neutron [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Updating instance_info_cache with network_info: [{"id": "7c2c0a8a-bbee-4e54-90a5-344bebd3b451", "address": "fa:16:3e:82:e0:41", "network": {"id": "ca8a320f-bc61-4d27-ae96-1e7235aa0925", "bridge": "br-int", "label": "tempest-ServersTestJSON-1082542131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d16229c82ee9494f9921831a13c6bf7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c2c0a8a-bb", "ovs_interfaceid": "7c2c0a8a-bbee-4e54-90a5-344bebd3b451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3021.433960] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Releasing lock "refresh_cache-6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3021.434269] env[61215]: DEBUG nova.compute.manager [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Instance network_info: |[{"id": "7c2c0a8a-bbee-4e54-90a5-344bebd3b451", "address": "fa:16:3e:82:e0:41", "network": {"id": "ca8a320f-bc61-4d27-ae96-1e7235aa0925", "bridge": "br-int", "label": "tempest-ServersTestJSON-1082542131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d16229c82ee9494f9921831a13c6bf7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tap7c2c0a8a-bb", "ovs_interfaceid": "7c2c0a8a-bbee-4e54-90a5-344bebd3b451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 3021.434708] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:e0:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aef08290-001a-4ae8-aff0-1889e2211389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c2c0a8a-bbee-4e54-90a5-344bebd3b451', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 3021.442423] env[61215]: DEBUG oslo.service.loopingcall [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3021.442886] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 3021.443132] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57162585-e6b8-42b1-8c03-3e66aa206456 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3021.464985] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 3021.464985] env[61215]: value = "task-1690527" [ 3021.464985] env[61215]: _type = "Task" [ 3021.464985] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3021.472718] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690527, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3021.746881] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690526, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134291} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3021.747244] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3021.747244] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/e16fd165-9d75-4a0c-8f83-c04377df3a79" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3021.747437] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/109daf4b-e7cc-432a-a8f5-9998849286e6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3021.747560] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/109daf4b-e7cc-432a-a8f5-9998849286e6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3021.747979] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/109daf4b-e7cc-432a-a8f5-9998849286e6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3021.748258] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a8ba2e9-fd14-4fdc-ab2f-0242324c8f48 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3021.754062] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3021.754062] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]522a029e-4464-4a39-0069-9445c63c0dc6" [ 3021.754062] env[61215]: _type = "Task" [ 3021.754062] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3021.761448] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]522a029e-4464-4a39-0069-9445c63c0dc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3021.974914] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690527, 'name': CreateVM_Task, 'duration_secs': 0.320056} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3021.975082] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 3021.975748] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3021.975913] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3021.976251] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3021.976493] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d670db5-302f-4a76-8205-2e719dcf939c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3021.980674] env[61215]: DEBUG oslo_vmware.api [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Waiting for the task: (returnval){ [ 3021.980674] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52701d90-fb39-50ad-bab1-f36ae11d0ef9" [ 3021.980674] env[61215]: _type = "Task" [ 3021.980674] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3021.987684] env[61215]: DEBUG oslo_vmware.api [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52701d90-fb39-50ad-bab1-f36ae11d0ef9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3022.264796] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]522a029e-4464-4a39-0069-9445c63c0dc6, 'name': SearchDatastore_Task, 'duration_secs': 0.012525} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3022.265146] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/109daf4b-e7cc-432a-a8f5-9998849286e6 is no longer used. Deleting! 
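The "Instance VIF info" record above shows vmops flattening neutron's network_info into the handful of fields the VM builder needs; with an NSX-backed port the network reference becomes an OpaqueNetwork keyed by the logical-switch id. A sketch of that translation using the values from this log; the helper name is illustrative, not Nova's real function:

    def vif_info_from_network_info(network_info):
        """Reduce neutron network_info entries to VM-build fields.

        Assumes every port is NSX-backed, hence the OpaqueNetwork reference
        and the vmxnet3 model, as in the record above.
        """
        vifs = []
        for vif in network_info:
            vifs.append({
                "network_name": vif["network"]["bridge"],   # 'br-int'
                "mac_address": vif["address"],
                "network_ref": {
                    "type": "OpaqueNetwork",
                    "network-id": vif["details"]["nsx-logical-switch-id"],
                    "network-type": "nsx.LogicalSwitch",
                    "use-external-id": True,
                },
                "iface_id": vif["id"],
                "vif_model": "vmxnet3",
            })
        return vifs

    network_info = [{
        "id": "7c2c0a8a-bbee-4e54-90a5-344bebd3b451",
        "address": "fa:16:3e:82:e0:41",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389"},
    }]
    print(vif_info_from_network_info(network_info))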
[ 3022.265294] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/109daf4b-e7cc-432a-a8f5-9998849286e6 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3022.265558] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a8720911-17b8-4116-ba06-380134658160 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3022.272009] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3022.272009] env[61215]: value = "task-1690528" [ 3022.272009] env[61215]: _type = "Task" [ 3022.272009] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3022.279229] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690528, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3022.491281] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3022.491621] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 3022.491862] env[61215]: DEBUG oslo_concurrency.lockutils [None req-9b61da8c-ad79-4048-a13b-79d8153b12ee tempest-ServersTestJSON-1355600418 tempest-ServersTestJSON-1355600418-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3022.785504] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690528, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.121715} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3022.785858] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3022.785985] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/109daf4b-e7cc-432a-a8f5-9998849286e6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3022.786296] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3022.786463] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3022.786876] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3022.787242] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0bfcd3c-9e00-4cb6-a145-a479047cf50e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3022.792499] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3022.792499] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e78ed3-ba10-8116-0175-8434ee85d336" [ 3022.792499] env[61215]: _type = "Task" [ 3022.792499] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3022.802319] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e78ed3-ba10-8116-0175-8434ee85d336, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3023.137681] env[61215]: DEBUG nova.compute.manager [req-333efcdd-1f41-4f44-866a-4fc52e42f048 req-ee40aced-fd73-4670-b724-405a4d95ac54 service nova] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Received event network-changed-7c2c0a8a-bbee-4e54-90a5-344bebd3b451 {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 3023.137934] env[61215]: DEBUG nova.compute.manager [req-333efcdd-1f41-4f44-866a-4fc52e42f048 req-ee40aced-fd73-4670-b724-405a4d95ac54 service nova] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Refreshing instance network info cache due to event network-changed-7c2c0a8a-bbee-4e54-90a5-344bebd3b451. 
{{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 3023.138134] env[61215]: DEBUG oslo_concurrency.lockutils [req-333efcdd-1f41-4f44-866a-4fc52e42f048 req-ee40aced-fd73-4670-b724-405a4d95ac54 service nova] Acquiring lock "refresh_cache-6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3023.138280] env[61215]: DEBUG oslo_concurrency.lockutils [req-333efcdd-1f41-4f44-866a-4fc52e42f048 req-ee40aced-fd73-4670-b724-405a4d95ac54 service nova] Acquired lock "refresh_cache-6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3023.138442] env[61215]: DEBUG nova.network.neutron [req-333efcdd-1f41-4f44-866a-4fc52e42f048 req-ee40aced-fd73-4670-b724-405a4d95ac54 service nova] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Refreshing network info cache for port 7c2c0a8a-bbee-4e54-90a5-344bebd3b451 {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 3023.304578] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e78ed3-ba10-8116-0175-8434ee85d336, 'name': SearchDatastore_Task, 'duration_secs': 0.008747} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3023.304889] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430 is no longer used. Deleting! [ 3023.305046] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3023.305315] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ede2a00-cbb3-4f7f-ae8d-d3c2f4ec1611 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3023.311535] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3023.311535] env[61215]: value = "task-1690529" [ 3023.311535] env[61215]: _type = "Task" [ 3023.311535] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3023.321291] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690529, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3023.429977] env[61215]: DEBUG nova.network.neutron [req-333efcdd-1f41-4f44-866a-4fc52e42f048 req-ee40aced-fd73-4670-b724-405a4d95ac54 service nova] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Updated VIF entry in instance network info cache for port 7c2c0a8a-bbee-4e54-90a5-344bebd3b451. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 3023.430371] env[61215]: DEBUG nova.network.neutron [req-333efcdd-1f41-4f44-866a-4fc52e42f048 req-ee40aced-fd73-4670-b724-405a4d95ac54 service nova] [instance: 6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68] Updating instance_info_cache with network_info: [{"id": "7c2c0a8a-bbee-4e54-90a5-344bebd3b451", "address": "fa:16:3e:82:e0:41", "network": {"id": "ca8a320f-bc61-4d27-ae96-1e7235aa0925", "bridge": "br-int", "label": "tempest-ServersTestJSON-1082542131-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d16229c82ee9494f9921831a13c6bf7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aef08290-001a-4ae8-aff0-1889e2211389", "external-id": "nsx-vlan-transportzone-389", "segmentation_id": 389, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c2c0a8a-bb", "ovs_interfaceid": "7c2c0a8a-bbee-4e54-90a5-344bebd3b451", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3023.440391] env[61215]: DEBUG oslo_concurrency.lockutils [req-333efcdd-1f41-4f44-866a-4fc52e42f048 req-ee40aced-fd73-4670-b724-405a4d95ac54 service nova] Releasing lock "refresh_cache-6b0b59ba-dc1b-4d0c-8202-9c977e0b5e68" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3023.821245] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690529, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12998} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3023.821558] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3023.821647] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/1184635d-f5f6-42d1-80ac-b5a20a46b430" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3023.821846] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/b6ca2c6a-24bf-4350-be34-2bdcbe74b766" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3023.821968] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/b6ca2c6a-24bf-4350-be34-2bdcbe74b766" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3023.822298] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/b6ca2c6a-24bf-4350-be34-2bdcbe74b766" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3023.822554] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70028c90-0b04-4869-a2d0-3e8bdf59db01 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3023.826656] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3023.826656] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5246252f-eaad-1074-75ea-639323c94759" [ 3023.826656] env[61215]: _type = "Task" [ 3023.826656] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3023.833679] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5246252f-eaad-1074-75ea-639323c94759, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3024.337897] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5246252f-eaad-1074-75ea-639323c94759, 'name': SearchDatastore_Task, 'duration_secs': 0.008851} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3024.338232] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/b6ca2c6a-24bf-4350-be34-2bdcbe74b766 is no longer used. Deleting! 
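From here to the end of the section the same three-step cycle repeats per cache entry: lock the entry, SearchDatastore to confirm it exists, then "is no longer used. Deleting!" followed by a DeleteDatastoreFile_Task. A toy version of that aging pass with a dict standing in for the datastore; the function and its arguments are illustrative, not nova.virt.vmwareapi.imagecache's real interface:

    # path -> last-touched timestamp; stands in for datastore1's cache directory.
    datastore = {
        "devstack-image-cache_base/b6ca2c6a-24bf-4350-be34-2bdcbe74b766": 100.0,
        "devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc": 200.0,
    }

    def age_cached_images(store, used_ids):
        """Delete every cache entry that no running instance references."""
        for path in sorted(store):
            image_id = path.rsplit("/", 1)[1]
            if image_id in used_ids:
                continue  # still backs an instance; keep it
            print("Image [datastore1] %s is no longer used. Deleting!" % path)
            del store[path]  # stands in for FileManager.DeleteDatastoreFile_Task

    # e91f0c25... backs the instance spawned earlier in this log, so only the
    # unreferenced entry is removed.
    age_cached_images(datastore, used_ids={"e91f0c25-9ff9-4937-8440-f47cfb2028bc"})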
[ 3024.338382] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/b6ca2c6a-24bf-4350-be34-2bdcbe74b766 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3024.338639] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bcbc622-1f1f-4e8b-a13f-cd7bedecf78f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3024.344612] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3024.344612] env[61215]: value = "task-1690530" [ 3024.344612] env[61215]: _type = "Task" [ 3024.344612] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3024.352260] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690530, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3024.854282] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690530, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128827} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3024.854605] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3024.854639] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/b6ca2c6a-24bf-4350-be34-2bdcbe74b766" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3024.855032] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/fe3ff768-4508-4f71-bdbf-cc790f67e325" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3024.855032] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/fe3ff768-4508-4f71-bdbf-cc790f67e325" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3024.855280] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/fe3ff768-4508-4f71-bdbf-cc790f67e325" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3024.855541] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69b4b125-d7ad-4d1e-9181-35662cc666cb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3024.859999] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3024.859999] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f4ecb9-17f5-b64c-1096-c71dab316e30" [ 3024.859999] env[61215]: _type = "Task" [ 3024.859999] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3024.867534] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f4ecb9-17f5-b64c-1096-c71dab316e30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3025.370666] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f4ecb9-17f5-b64c-1096-c71dab316e30, 'name': SearchDatastore_Task, 'duration_secs': 0.010814} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3025.370989] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/fe3ff768-4508-4f71-bdbf-cc790f67e325 is no longer used. Deleting! [ 3025.371155] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/fe3ff768-4508-4f71-bdbf-cc790f67e325 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3025.371418] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2def4b76-c567-4442-b467-75f4e2d33e0c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3025.377194] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3025.377194] env[61215]: value = "task-1690531" [ 3025.377194] env[61215]: _type = "Task" [ 3025.377194] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3025.384388] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690531, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3025.887682] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690531, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124963} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3025.888027] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3025.888089] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/fe3ff768-4508-4f71-bdbf-cc790f67e325" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3025.888319] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/a7505ec5-0178-48ad-bfb6-d066d90f3cd6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3025.888436] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/a7505ec5-0178-48ad-bfb6-d066d90f3cd6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3025.888763] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/a7505ec5-0178-48ad-bfb6-d066d90f3cd6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3025.889042] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0d4f1da-9f51-4c75-9f29-4b4c34e3fb5f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3025.893074] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3025.893074] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b68953-00b4-8173-e6f4-2f3cc3f0e237" [ 3025.893074] env[61215]: _type = "Task" [ 3025.893074] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3025.900117] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b68953-00b4-8173-e6f4-2f3cc3f0e237, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3026.403329] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b68953-00b4-8173-e6f4-2f3cc3f0e237, 'name': SearchDatastore_Task, 'duration_secs': 0.41756} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3026.403672] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/a7505ec5-0178-48ad-bfb6-d066d90f3cd6 is no longer used. Deleting! 
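Each cycle brackets its datastore work with an in-process lock plus an "external semaphore", i.e. a cross-process file lock, so concurrent workers cannot age the same cache entry at once. A minimal sketch with oslo.concurrency's lock() context manager (assumes oslo.concurrency is installed; lock_path is just any writable directory, and the file-lock name is simplified to stay filesystem-safe):

    from oslo_concurrency import lockutils

    def with_cache_entry(image_id):
        name = "[datastore1] devstack-image-cache_base/%s" % image_id
        # In-process lock first, then the inter-process file lock, mirroring the
        # Acquiring lock / Acquired external semaphore pairs above.
        with lockutils.lock(name):
            with lockutils.lock(image_id, external=True, lock_path="/tmp"):
                print('Acquired lock "%s"' % name)
                # ...search / delete the cache entry here...

    with_cache_entry("a7505ec5-0178-48ad-bfb6-d066d90f3cd6")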
[ 3026.403826] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/a7505ec5-0178-48ad-bfb6-d066d90f3cd6 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3026.404107] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75f5a9de-2c2c-4973-8279-b32b516317ae {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3026.410227] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3026.410227] env[61215]: value = "task-1690532" [ 3026.410227] env[61215]: _type = "Task" [ 3026.410227] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3026.417436] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690532, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3026.919941] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690532, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101117} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3026.920276] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3026.920341] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/a7505ec5-0178-48ad-bfb6-d066d90f3cd6" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3026.920569] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f17e5f5d-f85b-4a76-ab6b-4775aead8e18" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3026.920689] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/f17e5f5d-f85b-4a76-ab6b-4775aead8e18" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3026.921021] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f17e5f5d-f85b-4a76-ab6b-4775aead8e18" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3026.921302] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d9098ea-48c6-4604-89fd-0602189b5d16 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3026.925683] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3026.925683] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52177d11-f814-7278-76ca-c4086c138007" [ 3026.925683] env[61215]: _type = "Task" [ 3026.925683] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3026.934337] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52177d11-f814-7278-76ca-c4086c138007, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3027.436543] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52177d11-f814-7278-76ca-c4086c138007, 'name': SearchDatastore_Task, 'duration_secs': 0.053627} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3027.436882] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/f17e5f5d-f85b-4a76-ab6b-4775aead8e18 is no longer used. Deleting! [ 3027.437037] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/f17e5f5d-f85b-4a76-ab6b-4775aead8e18 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3027.437301] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c3f8c99-51fc-40c4-a661-974681cacb8a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3027.443115] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3027.443115] env[61215]: value = "task-1690533" [ 3027.443115] env[61215]: _type = "Task" [ 3027.443115] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3027.451178] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690533, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3027.953226] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690533, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10228} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3027.953558] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3027.953627] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/f17e5f5d-f85b-4a76-ab6b-4775aead8e18" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3027.953876] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/7fca1ccb-4e58-41b5-af7b-96007308c0ef" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3027.954039] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/7fca1ccb-4e58-41b5-af7b-96007308c0ef" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3027.954361] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/7fca1ccb-4e58-41b5-af7b-96007308c0ef" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3027.954658] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4456c75a-fcdf-435f-82f2-186b213b63dc {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3027.959019] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3027.959019] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f94ee1-d3cc-6fab-3884-7b90ca647061" [ 3027.959019] env[61215]: _type = "Task" [ 3027.959019] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3027.967328] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f94ee1-d3cc-6fab-3884-7b90ca647061, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3028.470147] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f94ee1-d3cc-6fab-3884-7b90ca647061, 'name': SearchDatastore_Task, 'duration_secs': 0.009286} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3028.470424] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/7fca1ccb-4e58-41b5-af7b-96007308c0ef is no longer used. Deleting! 
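Every "Waiting for the task: (returnval){ ... }" block above is followed by "progress is 0%." polls until a record with duration_secs and "completed successfully." lands. A minimal polling loop in the same spirit; the Task class is a toy, not oslo.vmware's API:

    import time

    class Task:
        """Toy async task mirroring the {'id': ..., 'name': ...} records above."""

        def __init__(self, task_id, name, duration):
            self.id, self.name = task_id, name
            self._done_at = time.monotonic() + duration

        def progress(self):
            return 100 if time.monotonic() >= self._done_at else 0

    def wait_for_task(task, interval=0.05):
        """Poll until completion, logging progress on every pass."""
        start = time.monotonic()
        while True:
            pct = task.progress()
            print("Task: {'id': %s, 'name': %s} progress is %d%%."
                  % (task.id, task.name, pct))
            if pct == 100:
                return time.monotonic() - start
            time.sleep(interval)

    secs = wait_for_task(Task("task-1690533", "DeleteDatastoreFile_Task", 0.1))
    print("completed successfully in %.6fs" % secs)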
[ 3028.470596] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/7fca1ccb-4e58-41b5-af7b-96007308c0ef {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3028.470881] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bfe88e97-7f9a-4434-ad6b-7d6e36901df1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3028.477432] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3028.477432] env[61215]: value = "task-1690534" [ 3028.477432] env[61215]: _type = "Task" [ 3028.477432] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3028.485017] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690534, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3028.987913] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690534, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101778} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3028.988298] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3028.988370] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/7fca1ccb-4e58-41b5-af7b-96007308c0ef" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3028.988528] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3028.988647] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3028.988978] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3028.989243] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9768d64-2a4f-4196-a6b6-48fdc2b1e630 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3028.993474] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3028.993474] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]5254fd28-3a50-0a4c-0c8f-b3eb0e7cb02e" [ 3028.993474] env[61215]: _type = "Task" [ 3028.993474] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3029.000886] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5254fd28-3a50-0a4c-0c8f-b3eb0e7cb02e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3029.504524] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5254fd28-3a50-0a4c-0c8f-b3eb0e7cb02e, 'name': SearchDatastore_Task, 'duration_secs': 0.008707} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3029.504857] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e is no longer used. Deleting! [ 3029.505014] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3029.505277] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3eb54f19-f231-40d6-a499-92a8ead14d0c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3029.511160] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3029.511160] env[61215]: value = "task-1690535" [ 3029.511160] env[61215]: _type = "Task" [ 3029.511160] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3029.518343] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690535, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3030.022052] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690535, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096995} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3030.022052] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3030.022052] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/1e0860e2-4aa5-48a7-bc29-49ea8e6f038e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3030.022052] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/217a4efc-8ff8-4c1e-8c6f-89d67dbe7a86" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3030.022052] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/217a4efc-8ff8-4c1e-8c6f-89d67dbe7a86" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3030.022432] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/217a4efc-8ff8-4c1e-8c6f-89d67dbe7a86" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3030.022432] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82b30b1a-00ea-4b67-b02f-bba9b0f081b8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3030.026829] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3030.026829] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f7f032-fa35-44b9-967c-bb0d2e800794" [ 3030.026829] env[61215]: _type = "Task" [ 3030.026829] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3030.034385] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f7f032-fa35-44b9-967c-bb0d2e800794, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3030.537711] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f7f032-fa35-44b9-967c-bb0d2e800794, 'name': SearchDatastore_Task, 'duration_secs': 0.008666} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3030.538042] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/217a4efc-8ff8-4c1e-8c6f-89d67dbe7a86 is no longer used. Deleting! 
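Two task-id shapes alternate throughout this section: plain "task-NNNNNNN" values come back from the FileManager and Folder operations and live in vCenter's task collector, while the "session[...]"-prefixed values (all the SearchDatastore_Task ids) appear to be scoped to the API session established at login. A small classifier over both shapes, purely illustrative:

    def classify_task_id(value):
        """Distinguish the two task-id shapes seen in this log."""
        if value.startswith("session["):
            return "session-scoped id (SearchDatastore_Task style)"
        if value.startswith("task-"):
            return "task-collector id (DeleteDatastoreFile/CreateVM style)"
        return "unknown"

    for value in ("task-1690536",
                  "session[520f0517-4d04-0e7d-b609-e256f8cf56da]"
                  "52f7f032-fa35-44b9-967c-bb0d2e800794"):
        print(value, "->", classify_task_id(value))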
[ 3030.538198] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/217a4efc-8ff8-4c1e-8c6f-89d67dbe7a86 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3030.538462] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-788dc609-fdca-43ea-8191-7b9d881df489 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3030.544573] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3030.544573] env[61215]: value = "task-1690536" [ 3030.544573] env[61215]: _type = "Task" [ 3030.544573] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3030.551765] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690536, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3031.054805] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690536, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.116295} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3031.055114] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3031.055238] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/217a4efc-8ff8-4c1e-8c6f-89d67dbe7a86" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3031.055460] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/3e1cddab-30ae-46b9-aa3e-b6cb1e7b0f9a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3031.055578] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/3e1cddab-30ae-46b9-aa3e-b6cb1e7b0f9a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3031.055890] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/3e1cddab-30ae-46b9-aa3e-b6cb1e7b0f9a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3031.056220] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74c01105-161c-4f2f-81cb-5b3d69078b71 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3031.060756] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3031.060756] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]526406d1-3b3b-206c-46bb-2c6e797624b9" [ 3031.060756] env[61215]: _type = "Task" [ 3031.060756] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3031.069895] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]526406d1-3b3b-206c-46bb-2c6e797624b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3031.571537] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]526406d1-3b3b-206c-46bb-2c6e797624b9, 'name': SearchDatastore_Task, 'duration_secs': 0.009676} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3031.571910] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/3e1cddab-30ae-46b9-aa3e-b6cb1e7b0f9a is no longer used. Deleting! [ 3031.572070] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/3e1cddab-30ae-46b9-aa3e-b6cb1e7b0f9a {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3031.572343] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67199c0b-4bc1-4c3e-891b-f624fee632a3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3031.578488] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3031.578488] env[61215]: value = "task-1690537" [ 3031.578488] env[61215]: _type = "Task" [ 3031.578488] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3031.585854] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690537, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3032.091395] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690537, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09778} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3032.091758] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3032.092073] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/3e1cddab-30ae-46b9-aa3e-b6cb1e7b0f9a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3032.092251] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/38183c5c-65a6-44a9-a790-7887aab2a27c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3032.128261] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/38183c5c-65a6-44a9-a790-7887aab2a27c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3032.128457] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/38183c5c-65a6-44a9-a790-7887aab2a27c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3032.128776] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba620201-245a-4f57-915f-bc3e7fa8db6d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3032.133504] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3032.133504] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52c82784-d645-a713-3b88-a23614e7de3c" [ 3032.133504] env[61215]: _type = "Task" [ 3032.133504] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3032.141148] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52c82784-d645-a713-3b88-a23614e7de3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3032.644443] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52c82784-d645-a713-3b88-a23614e7de3c, 'name': SearchDatastore_Task, 'duration_secs': 0.008864} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3032.644790] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/38183c5c-65a6-44a9-a790-7887aab2a27c is no longer used. Deleting! 
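Zooming out, every cycle in this section is one iteration of the same image-cache aging pass: lock one devstack-image-cache_base/<image-id> entry, run a SearchDatastore_Task against it, decide the image is no longer used, delete its directory, release the lock, and move on to the next id. A condensed sketch of that loop is below; session, datastore and is_image_used() are hypothetical stand-ins, since the real aging criteria live in nova.virt.vmwareapi.imagecache (lock handling is elided here and sketched separately after the next cycle):

    def age_cached_images(session, datastore, cached_image_ids, is_image_used):
        """One aging pass over the VMware image cache, mirroring the cycles above."""
        for image_id in cached_image_ids:
            path = f"[{datastore}] devstack-image-cache_base/{image_id}"
            # Per-entry lock held around this block (see the lockutils sketch below).
            # SearchDatastore_Task: inspect the cache entry's state on disk.
            if is_image_used(session, path):
                continue
            # INFO "... is no longer used. Deleting!" followed by
            # FileManager.DeleteDatastoreFile_Task, as in the records above.
            delete_datastore_file(session, path)

    def delete_datastore_file(session, path):
        # Placeholder; the actual deletion call is sketched later in this section.
        raise NotImplementedError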
[ 3032.644956] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/38183c5c-65a6-44a9-a790-7887aab2a27c {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3032.645222] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be26e4b5-9154-4106-b869-b2ff16d39b0a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3032.651651] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3032.651651] env[61215]: value = "task-1690538" [ 3032.651651] env[61215]: _type = "Task" [ 3032.651651] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3032.659021] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690538, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3033.161481] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690538, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130019} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3033.161782] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3033.161937] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/38183c5c-65a6-44a9-a790-7887aab2a27c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3033.162175] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/c1d587ae-67a3-4f10-8d61-599b3786d3a4" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3033.162295] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/c1d587ae-67a3-4f10-8d61-599b3786d3a4" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3033.162601] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/c1d587ae-67a3-4f10-8d61-599b3786d3a4" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3033.162869] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-945773c2-5876-4324-9fd3-d0c9a0b8c778 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3033.167143] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3033.167143] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]525ddd64-c0e2-d6d6-6513-022786125441" [ 3033.167143] env[61215]: _type = "Task" [ 3033.167143] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3033.174644] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525ddd64-c0e2-d6d6-6513-022786125441, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3033.678333] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525ddd64-c0e2-d6d6-6513-022786125441, 'name': SearchDatastore_Task, 'duration_secs': 0.010485} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3033.678667] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/c1d587ae-67a3-4f10-8d61-599b3786d3a4 is no longer used. Deleting! [ 3033.678823] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/c1d587ae-67a3-4f10-8d61-599b3786d3a4 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3033.679106] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba2bbc80-7133-49dd-bdad-f05294acfb6d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3033.685580] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3033.685580] env[61215]: value = "task-1690539" [ 3033.685580] env[61215]: _type = "Task" [ 3033.685580] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3034.195857] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690539, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179882} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3034.196232] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3034.196279] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/c1d587ae-67a3-4f10-8d61-599b3786d3a4" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3034.196482] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/2a9f7ead-9398-4d08-b589-bb77db498316" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3034.196600] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/2a9f7ead-9398-4d08-b589-bb77db498316" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3034.196923] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/2a9f7ead-9398-4d08-b589-bb77db498316" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3034.197209] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9c11970-a319-4b73-828a-e0e1f59f91c2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3034.201518] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3034.201518] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d56e54-0b5c-cbe0-991b-ad45677c978b" [ 3034.201518] env[61215]: _type = "Task" [ 3034.201518] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3034.208751] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d56e54-0b5c-cbe0-991b-ad45677c978b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3034.711823] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d56e54-0b5c-cbe0-991b-ad45677c978b, 'name': SearchDatastore_Task, 'duration_secs': 0.009477} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3034.712147] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/2a9f7ead-9398-4d08-b589-bb77db498316 is no longer used. Deleting! 
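The Acquiring/Acquired/Releasing records bracketing each cache entry come from oslo.concurrency's lockutils, and the "Acquired external semaphore" record indicates an external (file-based) lock, typically used so that multiple processes on the same host cannot age the same cache directory concurrently. The usual way to take such a lock is the lockutils.lock context manager; a minimal sketch, with the lock name copied from the records above and the lock_path chosen here purely for illustration:

    from oslo_concurrency import lockutils

    # external=True adds an inter-process file lock on top of the in-process
    # one; lock_path says where the lock files live (this path is an assumption).
    with lockutils.lock(
            "[datastore1] devstack-image-cache_base/2a9f7ead-9398-4d08-b589-bb77db498316",
            external=True, lock_path="/var/lock/nova"):
        pass  # search the cache entry and delete it if unused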
[ 3034.712296] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/2a9f7ead-9398-4d08-b589-bb77db498316 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3034.712559] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-455ebdf3-02a8-40d1-a468-edd899b392dd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3034.718479] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3034.718479] env[61215]: value = "task-1690540" [ 3034.718479] env[61215]: _type = "Task" [ 3034.718479] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3034.725926] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690540, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3035.228685] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690540, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.113188} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3035.229123] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3035.229123] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/2a9f7ead-9398-4d08-b589-bb77db498316" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3035.229292] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/bc7efd6e-6d44-43c2-89a3-057ef60e6fd0" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3035.229413] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/bc7efd6e-6d44-43c2-89a3-057ef60e6fd0" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3035.229753] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/bc7efd6e-6d44-43c2-89a3-057ef60e6fd0" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3035.230034] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d7085fe-9c8c-4b2d-9622-bded352f2553 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3035.234160] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3035.234160] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52fd1d84-7b2e-1836-7494-c0a1305c0bcf" [ 3035.234160] env[61215]: _type = "Task" [ 3035.234160] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3035.241842] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52fd1d84-7b2e-1836-7494-c0a1305c0bcf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3035.745117] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52fd1d84-7b2e-1836-7494-c0a1305c0bcf, 'name': SearchDatastore_Task, 'duration_secs': 0.281844} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3035.745447] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/bc7efd6e-6d44-43c2-89a3-057ef60e6fd0 is no longer used. Deleting! [ 3035.745594] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/bc7efd6e-6d44-43c2-89a3-057ef60e6fd0 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3035.745861] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-90ba9215-de4b-44cc-82fa-4af90b61e239 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3035.752153] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3035.752153] env[61215]: value = "task-1690541" [ 3035.752153] env[61215]: _type = "Task" [ 3035.752153] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3035.759428] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690541, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3036.263104] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690541, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111665} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3036.263104] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3036.263104] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/bc7efd6e-6d44-43c2-89a3-057ef60e6fd0" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3036.263104] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/58e7a587-392f-4ebc-8cd3-f8a7dee378c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3036.263104] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/58e7a587-392f-4ebc-8cd3-f8a7dee378c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3036.263599] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/58e7a587-392f-4ebc-8cd3-f8a7dee378c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3036.263599] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faa8e514-b565-450b-b289-94c2db998824 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3036.268038] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3036.268038] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5224ebf5-7100-dfdd-599c-b4887bdb2d64" [ 3036.268038] env[61215]: _type = "Task" [ 3036.268038] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3036.275431] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5224ebf5-7100-dfdd-599c-b4887bdb2d64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3036.780780] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5224ebf5-7100-dfdd-599c-b4887bdb2d64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3037.279199] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5224ebf5-7100-dfdd-599c-b4887bdb2d64, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3037.780560] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5224ebf5-7100-dfdd-599c-b4887bdb2d64, 'name': SearchDatastore_Task, 'duration_secs': 1.482148} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3037.781770] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/58e7a587-392f-4ebc-8cd3-f8a7dee378c1 is no longer used. Deleting! [ 3037.781770] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/58e7a587-392f-4ebc-8cd3-f8a7dee378c1 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3037.781770] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-18678a8f-da36-4fe7-bd7f-b3732c166644 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3037.786989] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3037.786989] env[61215]: value = "task-1690542" [ 3037.786989] env[61215]: _type = "Task" [ 3037.786989] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3037.794174] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690542, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3038.297997] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690542, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115056} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3038.298485] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3038.298725] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/58e7a587-392f-4ebc-8cd3-f8a7dee378c1" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3038.299034] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/9247edf4-a122-44d1-be5f-7d303a52a4ac" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3038.299175] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/9247edf4-a122-44d1-be5f-7d303a52a4ac" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3038.299631] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/9247edf4-a122-44d1-be5f-7d303a52a4ac" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3038.299927] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d6d2f97-a6a7-47fb-9761-082854cc8050 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3038.304636] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3038.304636] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5262f9fd-a221-f4b9-1ef5-4ce1ab2a7143" [ 3038.304636] env[61215]: _type = "Task" [ 3038.304636] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3038.312842] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5262f9fd-a221-f4b9-1ef5-4ce1ab2a7143, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3038.816142] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5262f9fd-a221-f4b9-1ef5-4ce1ab2a7143, 'name': SearchDatastore_Task, 'duration_secs': 0.009192} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3038.816546] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/9247edf4-a122-44d1-be5f-7d303a52a4ac is no longer used. Deleting! 
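Most SearchDatastore_Task calls in this section finish within a single poll interval, so only one "progress is 0%" record appears before the completion record. The slow search above (duration_secs 1.482148, task 5224ebf5-7100-dfdd-599c-b4887bdb2d64) is the exception: at the ~0.5 s cadence it needed three progress polls before the fourth read found it finished, which is exactly the run of repeated 0% records in the trace. The expected poll count falls out of simple arithmetic:

    import math

    poll_interval = 0.5       # seconds, inferred from the record timestamps
    duration_secs = 1.482148  # reported by the completed SearchDatastore_Task

    polls_before_completion = math.ceil(duration_secs / poll_interval)
    assert polls_before_completion == 3  # matches the three "progress is 0%" records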
[ 3038.816761] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/9247edf4-a122-44d1-be5f-7d303a52a4ac {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3038.817132] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-812819b2-d097-4db5-981a-7733d1724095 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3038.824306] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3038.824306] env[61215]: value = "task-1690543" [ 3038.824306] env[61215]: _type = "Task" [ 3038.824306] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3038.832527] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690543, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3039.333926] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690543, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11323} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3039.334226] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3039.334346] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/9247edf4-a122-44d1-be5f-7d303a52a4ac" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3039.334571] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3039.334721] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3039.335063] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3039.335333] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cad1fdbd-0514-445f-a2f9-285f0f66618a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3039.339847] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3039.339847] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f43c83-3861-8475-0bff-128ace505154" [ 3039.339847] env[61215]: _type = "Task" [ 3039.339847] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3039.347394] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f43c83-3861-8475-0bff-128ace505154, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3039.850850] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f43c83-3861-8475-0bff-128ace505154, 'name': SearchDatastore_Task, 'duration_secs': 0.009527} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3039.851165] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e is no longer used. Deleting! [ 3039.851320] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3039.851575] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f256c656-f624-4625-8c3f-71008b04582d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3039.857659] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3039.857659] env[61215]: value = "task-1690544" [ 3039.857659] env[61215]: _type = "Task" [ 3039.857659] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3039.866054] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690544, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3040.368479] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690544, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105373} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3040.368849] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3040.368899] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/e85853cc-33ff-4df6-aa9d-eb02e938717e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3040.369119] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3040.369244] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3040.369584] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3040.371659] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bf500d8-04ff-4390-befc-6dae5247d4d6 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3040.376024] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3040.376024] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]524aa434-7953-63ed-45b0-911451284fd2" [ 3040.376024] env[61215]: _type = "Task" [ 3040.376024] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3040.381874] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]524aa434-7953-63ed-45b0-911451284fd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3040.888173] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]524aa434-7953-63ed-45b0-911451284fd2, 'name': SearchDatastore_Task, 'duration_secs': 0.009465} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3040.888173] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460 is no longer used. Deleting! 
[ 3040.888173] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3040.888173] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d20787dd-262b-44aa-ad94-1d645ef92efd {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3040.895020] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3040.895020] env[61215]: value = "task-1690545" [ 3040.895020] env[61215]: _type = "Task" [ 3040.895020] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3040.900113] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690545, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3041.404058] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690545, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107752} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3041.404058] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3041.404058] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/78b0977c-33d1-40c7-836f-08f17ea59460" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3041.404058] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e707584b-0a40-4d72-896f-5b5fb276b3bd" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3041.404058] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/e707584b-0a40-4d72-896f-5b5fb276b3bd" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3041.404058] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e707584b-0a40-4d72-896f-5b5fb276b3bd" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3041.404058] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e4c54be-01f5-439c-87a4-808820b8fb70 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3041.407654] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3041.407654] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52933508-74c5-a121-484d-c892c2337521" [ 3041.407654] env[61215]: _type = "Task" [ 3041.407654] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3041.414872] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52933508-74c5-a121-484d-c892c2337521, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3041.918642] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52933508-74c5-a121-484d-c892c2337521, 'name': SearchDatastore_Task, 'duration_secs': 0.009607} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3041.919081] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/e707584b-0a40-4d72-896f-5b5fb276b3bd is no longer used. Deleting! [ 3041.919244] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/e707584b-0a40-4d72-896f-5b5fb276b3bd {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3041.919505] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c0c3a92-c78b-4b72-ae87-a5f33e51020b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3041.925474] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3041.925474] env[61215]: value = "task-1690546" [ 3041.925474] env[61215]: _type = "Task" [ 3041.925474] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3041.932668] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3042.435317] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107944} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3042.435610] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3042.435810] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/e707584b-0a40-4d72-896f-5b5fb276b3bd" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3042.436117] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/08d7c093-b532-44ed-92d4-7196f6ff0b89" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3042.436295] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/08d7c093-b532-44ed-92d4-7196f6ff0b89" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3042.436641] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/08d7c093-b532-44ed-92d4-7196f6ff0b89" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3042.436964] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a38373ac-e707-4f4b-b3c5-1eec59ec6615 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3042.441431] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3042.441431] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5282ecd4-0c6a-60e5-3fb6-b091ccc036e2" [ 3042.441431] env[61215]: _type = "Task" [ 3042.441431] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3042.448936] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5282ecd4-0c6a-60e5-3fb6-b091ccc036e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3042.952671] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5282ecd4-0c6a-60e5-3fb6-b091ccc036e2, 'name': SearchDatastore_Task, 'duration_secs': 0.009952} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3042.952972] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/08d7c093-b532-44ed-92d4-7196f6ff0b89 is no longer used. Deleting! 
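For completeness, the deletion step itself: nova's ds_util.file_delete (logged above at ds_util.py:211 and :220) issues FileManager.DeleteDatastoreFile_Task through the API session and then blocks on the task. In rough outline it looks like the sketch below; session._call_method and session._wait_for_task are nova's private wrappers around oslo.vmware, so treat the exact names and signatures as assumptions rather than a verbatim copy:

    def file_delete(session, ds_path, dc_ref):
        """Delete a datastore file/directory and wait for vCenter to confirm."""
        vim = session.vim
        # Invoke FileManager.DeleteDatastoreFile_Task (the "Invoking ..." records).
        delete_task = session._call_method(
            vim,
            "DeleteDatastoreFile_Task",
            vim.service_content.fileManager,
            name=str(ds_path),  # e.g. "[datastore1] devstack-image-cache_base/<id>"
            datacenter=dc_ref)
        # Poll until completion, as in the wait_for_task records above.
        session._wait_for_task(delete_task)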
[ 3042.953144] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/08d7c093-b532-44ed-92d4-7196f6ff0b89 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3042.953414] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e66b3062-9f4e-4ca1-8a0f-de084ef1742e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3042.960044] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3042.960044] env[61215]: value = "task-1690547" [ 3042.960044] env[61215]: _type = "Task" [ 3042.960044] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3042.967852] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690547, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3043.470434] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099491} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3043.470773] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3043.470934] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/08d7c093-b532-44ed-92d4-7196f6ff0b89" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3043.471281] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/fcfd55ea-bb0e-499c-9cf3-b17daa3137c9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3043.471475] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/fcfd55ea-bb0e-499c-9cf3-b17daa3137c9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3043.471867] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/fcfd55ea-bb0e-499c-9cf3-b17daa3137c9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3043.472242] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd00d3bb-37c6-4f5b-bff6-9c3d099faa33 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3043.478215] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3043.478215] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52bc62ff-75d7-b1ec-1a0d-0542d20c6948" [ 3043.478215] env[61215]: _type = "Task" [ 3043.478215] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3043.486610] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52bc62ff-75d7-b1ec-1a0d-0542d20c6948, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3043.989269] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52bc62ff-75d7-b1ec-1a0d-0542d20c6948, 'name': SearchDatastore_Task, 'duration_secs': 0.00931} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3043.989671] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/fcfd55ea-bb0e-499c-9cf3-b17daa3137c9 is no longer used. Deleting! [ 3043.989820] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/fcfd55ea-bb0e-499c-9cf3-b17daa3137c9 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3043.990100] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54fe5343-b8a5-4674-9113-8f39c69e26bb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3043.996397] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3043.996397] env[61215]: value = "task-1690548" [ 3043.996397] env[61215]: _type = "Task" [ 3043.996397] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3044.004895] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690548, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3044.506367] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690548, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168264} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3044.506641] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3044.506795] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/fcfd55ea-bb0e-499c-9cf3-b17daa3137c9" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3044.506990] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/2ca279e0-ada3-4d99-ab43-14f184a1f210" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3044.507133] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/2ca279e0-ada3-4d99-ab43-14f184a1f210" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3044.507450] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/2ca279e0-ada3-4d99-ab43-14f184a1f210" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3044.507724] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed1783ec-8637-4087-8253-d99a9908a442 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3044.512181] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3044.512181] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52456958-e6b2-39a3-9dac-da3c02c09b4e" [ 3044.512181] env[61215]: _type = "Task" [ 3044.512181] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3044.519483] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52456958-e6b2-39a3-9dac-da3c02c09b4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3045.022723] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52456958-e6b2-39a3-9dac-da3c02c09b4e, 'name': SearchDatastore_Task, 'duration_secs': 0.009143} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3045.023028] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/2ca279e0-ada3-4d99-ab43-14f184a1f210 is no longer used. Deleting! 
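The entries above repeat one pattern: a vSphere task method (here FileManager.DeleteDatastoreFile_Task) is invoked, the API returns a Task moref such as task-1690548, and the caller blocks in oslo.vmware's task-wait loop until the task reaches a terminal state. A minimal sketch of that round trip using oslo.vmware's public session methods (nova's ds_util.file_delete uses the session's private _call_method/_wait_for_task equivalents; ds_path and dc_ref are assumed inputs):

    from oslo_vmware import api  # session is an oslo_vmware.api.VMwareAPISession

    def delete_datastore_file(session, ds_path, dc_ref):
        # Start the server-side task; the return value is a Task moref
        # like the "task-1690548" seen above.
        task = session.invoke_api(
            session.vim, 'DeleteDatastoreFile_Task',
            session.vim.service_content.fileManager,
            name=str(ds_path), datacenter=dc_ref)
        # Block until the task finishes; this produces the paired
        # "Waiting for the task" / "completed successfully" lines.
        session.wait_for_task(task)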
[ 3045.023230] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/2ca279e0-ada3-4d99-ab43-14f184a1f210 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3045.023503] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d46f5b4-db73-47d9-a56c-6b1ccada4a9f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3045.029317] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3045.029317] env[61215]: value = "task-1690549" [ 3045.029317] env[61215]: _type = "Task" [ 3045.029317] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3045.036738] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690549, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3045.538980] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690549, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109515} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3045.539373] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3045.539373] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/2ca279e0-ada3-4d99-ab43-14f184a1f210" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3045.539596] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/8bcecce4-adda-439a-ac6b-313386b02663" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3045.539715] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/8bcecce4-adda-439a-ac6b-313386b02663" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3045.540051] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/8bcecce4-adda-439a-ac6b-313386b02663" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3045.540322] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fc89389-55b0-4990-940d-d996739ec83d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3045.544815] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3045.544815] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]5239f172-897d-a564-84ac-2448c7a449d4" [ 3045.544815] env[61215]: _type = "Task" [ 3045.544815] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3045.551980] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5239f172-897d-a564-84ac-2448c7a449d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3046.055507] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5239f172-897d-a564-84ac-2448c7a449d4, 'name': SearchDatastore_Task, 'duration_secs': 0.009632} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3046.055822] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/8bcecce4-adda-439a-ac6b-313386b02663 is no longer used. Deleting! [ 3046.055977] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/8bcecce4-adda-439a-ac6b-313386b02663 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3046.056279] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e54b752d-def6-46c6-9a6a-b4fc03ae479d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3046.062581] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3046.062581] env[61215]: value = "task-1690550" [ 3046.062581] env[61215]: _type = "Task" [ 3046.062581] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3046.070048] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690550, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3046.572194] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690550, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111739} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3046.572540] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3046.572540] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/8bcecce4-adda-439a-ac6b-313386b02663" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3046.572760] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e96ef2f2-c9dd-4d18-abcd-cf7d64d69cc7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3046.572881] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/e96ef2f2-c9dd-4d18-abcd-cf7d64d69cc7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3046.573251] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e96ef2f2-c9dd-4d18-abcd-cf7d64d69cc7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3046.573515] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eb73a28-5b38-4823-9ae3-d730cbaf3ad0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3046.577653] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3046.577653] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52087556-47de-a544-7351-564d9521f613" [ 3046.577653] env[61215]: _type = "Task" [ 3046.577653] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3046.585026] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52087556-47de-a544-7351-564d9521f613, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3047.089059] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52087556-47de-a544-7351-564d9521f613, 'name': SearchDatastore_Task, 'duration_secs': 0.010921} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3047.089059] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/e96ef2f2-c9dd-4d18-abcd-cf7d64d69cc7 is no longer used. Deleting! 
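Around each cache entry the manager takes a per-path lock: the "Acquiring lock" / "Acquired lock" / "Acquired external semaphore" / "Releasing lock" quartet maps onto oslo_concurrency.lockutils.lock with external=True, which holds both an in-process semaphore and an inter-process file lock. A small sketch of that pattern (the lock_path value is an assumption, not taken from this log):

    from oslo_concurrency import lockutils

    def evict_with_lock(ds_path, evict_fn):
        # lock(..., external=True) emits the Acquiring/Acquired/Releasing
        # debug lines quoted above (lockutils.py:310-331).
        with lockutils.lock(ds_path, external=True,
                            lock_path='/var/lock/nova'):  # assumed path
            evict_fn(ds_path)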
[ 3047.089259] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/e96ef2f2-c9dd-4d18-abcd-cf7d64d69cc7 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3047.089480] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eba26d4c-bd7f-4b0e-96b2-dbfaab37e86b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3047.096909] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3047.096909] env[61215]: value = "task-1690551" [ 3047.096909] env[61215]: _type = "Task" [ 3047.096909] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3047.104612] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690551, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3047.609024] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690551, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105545} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3047.609024] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3047.609024] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/e96ef2f2-c9dd-4d18-abcd-cf7d64d69cc7" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3047.609024] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/e9775c62-1e83-4c3b-aca1-5151afa0bb0c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3047.609024] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/e9775c62-1e83-4c3b-aca1-5151afa0bb0c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3047.609619] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/e9775c62-1e83-4c3b-aca1-5151afa0bb0c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3047.609619] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ce31627-be02-4676-a498-22ee6d3b9dce {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3047.613926] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3047.613926] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]5237ced3-04c2-b39a-c74d-316d18bb2c2d" [ 3047.613926] env[61215]: _type = "Task" [ 3047.613926] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3047.621182] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5237ced3-04c2-b39a-c74d-316d18bb2c2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3048.124718] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5237ced3-04c2-b39a-c74d-316d18bb2c2d, 'name': SearchDatastore_Task, 'duration_secs': 0.009734} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3048.124974] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/e9775c62-1e83-4c3b-aca1-5151afa0bb0c is no longer used. Deleting! [ 3048.125140] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/e9775c62-1e83-4c3b-aca1-5151afa0bb0c {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3048.125399] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f80be296-cf91-46e2-9f60-3c58c9bfe852 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3048.131430] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3048.131430] env[61215]: value = "task-1690552" [ 3048.131430] env[61215]: _type = "Task" [ 3048.131430] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3048.139455] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690552, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3048.640917] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690552, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11804} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3048.641299] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3048.641338] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/e9775c62-1e83-4c3b-aca1-5151afa0bb0c" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3048.641555] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/867bb889-d811-4f58-9248-aa2baf0f41fe" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3048.641700] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/867bb889-d811-4f58-9248-aa2baf0f41fe" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3048.641991] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/867bb889-d811-4f58-9248-aa2baf0f41fe" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3048.642280] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3051e0f5-8a9a-4c40-b536-89b905cc089f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3048.646495] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3048.646495] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]520c885f-8526-900d-0c95-aaf42f5f95ba" [ 3048.646495] env[61215]: _type = "Task" [ 3048.646495] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3048.653657] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]520c885f-8526-900d-0c95-aaf42f5f95ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3049.157642] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]520c885f-8526-900d-0c95-aaf42f5f95ba, 'name': SearchDatastore_Task, 'duration_secs': 0.009678} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3049.157966] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/867bb889-d811-4f58-9248-aa2baf0f41fe is no longer used. Deleting! 
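Each cycle in this stretch is one step of the image-cache aging pass: SearchDatastore_Task probes the cached folder, and because no running instance references the image, the INFO line "is no longer used. Deleting!" is followed by a file delete. An illustrative reduction of that loop (the function and variable names here are invented for the sketch, not nova.virt.vmwareapi.imagecache's internals):

    def age_image_cache(cached, in_use, delete_fn):
        """cached: {image_id: datastore_path}; in_use: set of image ids
        still referenced by instances on this host."""
        for image_id, ds_path in cached.items():
            if image_id in in_use:
                continue  # keep: some instance still boots from it
            # Mirrors: "Image ... is no longer used. Deleting!"
            print('Image %s is no longer used. Deleting!' % ds_path)
            delete_fn(ds_path)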
[ 3049.158186] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/867bb889-d811-4f58-9248-aa2baf0f41fe {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3049.158428] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37b6adb4-02d7-4faa-8fef-b224ef7a6c43 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.166249] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3049.166249] env[61215]: value = "task-1690553" [ 3049.166249] env[61215]: _type = "Task" [ 3049.166249] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3049.179299] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690553, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3049.676761] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690553, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1202} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3049.677216] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3049.677216] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/867bb889-d811-4f58-9248-aa2baf0f41fe" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3049.677400] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/2652b78d-cf46-44a6-aaf2-217222a11226" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3049.677519] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/2652b78d-cf46-44a6-aaf2-217222a11226" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3049.677932] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/2652b78d-cf46-44a6-aaf2-217222a11226" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3049.678231] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07d00954-850c-4e9b-801f-4e64bea4a63d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3049.682597] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3049.682597] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52826549-0019-d3e2-6449-3b0cbdbd90d9" [ 3049.682597] env[61215]: _type = "Task" [ 3049.682597] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3049.690888] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52826549-0019-d3e2-6449-3b0cbdbd90d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3050.193079] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52826549-0019-d3e2-6449-3b0cbdbd90d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009626} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3050.193433] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/2652b78d-cf46-44a6-aaf2-217222a11226 is no longer used. Deleting! [ 3050.193587] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/2652b78d-cf46-44a6-aaf2-217222a11226 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3050.193871] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22e0856d-0c08-4af0-9515-797858ec49d9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3050.199968] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3050.199968] env[61215]: value = "task-1690554" [ 3050.199968] env[61215]: _type = "Task" [ 3050.199968] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3050.207295] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690554, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3050.711790] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690554, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3051.210692] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690554, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.628493} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3051.210914] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3051.211192] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/2652b78d-cf46-44a6-aaf2-217222a11226" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3051.211343] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/4e6e0dcd-0803-48fe-9134-be239aca4c70" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3051.211462] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/4e6e0dcd-0803-48fe-9134-be239aca4c70" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3051.211780] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/4e6e0dcd-0803-48fe-9134-be239aca4c70" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3051.212064] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f7c29df-4214-4958-b6cf-acb0ce7e577e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3051.216345] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3051.216345] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]524eab7e-f5af-fcdb-65c1-25d8f017cf89" [ 3051.216345] env[61215]: _type = "Task" [ 3051.216345] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3051.223621] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]524eab7e-f5af-fcdb-65c1-25d8f017cf89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3051.727263] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]524eab7e-f5af-fcdb-65c1-25d8f017cf89, 'name': SearchDatastore_Task, 'duration_secs': 0.008544} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3051.727597] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/4e6e0dcd-0803-48fe-9134-be239aca4c70 is no longer used. Deleting! 
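The HostDatastoreBrowser.SearchDatastore_Task call that opens every cycle is an existence probe against the cache directory. A sketch of that probe in the shape of nova's ds_util helpers, again via oslo.vmware's public session methods; the broad exception handler stands in for the file-not-found case that nova catches specifically:

    def cache_entry_exists(session, ds_browser, ds_path, file_name):
        factory = session.vim.client.factory
        spec = factory.create('ns0:HostDatastoreBrowserSearchSpec')
        spec.matchPattern = [file_name]
        task = session.invoke_api(session.vim, 'SearchDatastore_Task',
                                  ds_browser, datastorePath=str(ds_path),
                                  searchSpec=spec)
        try:
            task_info = session.wait_for_task(task)
        except Exception:  # nova narrows this to FileNotFoundException
            return False
        # A populated 'file' attribute on the result means a match.
        return getattr(task_info.result, 'file', None) is not None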
[ 3051.727712] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/4e6e0dcd-0803-48fe-9134-be239aca4c70 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3051.727934] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7a0afd6-2a08-41e5-9b5b-3009436c3c61 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3051.734069] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3051.734069] env[61215]: value = "task-1690555" [ 3051.734069] env[61215]: _type = "Task" [ 3051.734069] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3051.741245] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690555, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3052.244061] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690555, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103023} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3052.244337] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3052.244477] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/4e6e0dcd-0803-48fe-9134-be239aca4c70" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3052.244710] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/cd17a7a3-13d9-4900-8ed6-8474a9cef590" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3052.244950] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/cd17a7a3-13d9-4900-8ed6-8474a9cef590" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3052.245276] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd17a7a3-13d9-4900-8ed6-8474a9cef590" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3052.245567] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d17f7cc5-4c63-4f3b-badc-56f21fb3492d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3052.250295] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3052.250295] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52563079-823e-b921-55c9-d814a4ca255c" [ 3052.250295] env[61215]: _type = "Task" [ 3052.250295] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3052.258494] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52563079-823e-b921-55c9-d814a4ca255c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3055.229047] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52563079-823e-b921-55c9-d814a4ca255c, 'name': SearchDatastore_Task, 'duration_secs': 0.01006} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3055.229047] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/cd17a7a3-13d9-4900-8ed6-8474a9cef590 is no longer used. Deleting! [ 3055.229047] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/cd17a7a3-13d9-4900-8ed6-8474a9cef590 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3055.229047] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc647966-e76d-4a2c-8ea7-5f55258a598f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.229047] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3055.229047] env[61215]: value = "task-1690556" [ 3055.229047] env[61215]: _type = "Task" [ 3055.229047] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3055.229047] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690556, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3055.229047] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690556, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.117319} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3055.229047] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3055.229047] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/cd17a7a3-13d9-4900-8ed6-8474a9cef590" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3055.229047] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3055.229047] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3055.230698] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3055.230698] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bb0d2d0-03e6-425d-b82f-f8af6c5b8cb2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.235309] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3055.235309] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ef8472-8ee8-4e80-cbed-a7475c2f5779" [ 3055.235309] env[61215]: _type = "Task" [ 3055.235309] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3055.243267] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ef8472-8ee8-4e80-cbed-a7475c2f5779, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3055.747057] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ef8472-8ee8-4e80-cbed-a7475c2f5779, 'name': SearchDatastore_Task, 'duration_secs': 0.018003} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3055.747057] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212 is no longer used. Deleting! 
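Note the timing in these records: the delete tasks report duration_secs of roughly 0.1s, yet about half a second elapses between the "progress is 0%" poll and the "completed successfully" line. That gap is the client-side poll interval, not server latency; completion is only observed on the next tick of a fixed-interval loop like the one below (illustrative; get_task_info is a hypothetical callable returning one poll result):

    import time

    def poll_until_done(get_task_info, interval=0.5):
        while True:
            info = get_task_info()  # hypothetical: one poll, as a dict
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            print('progress is %s%%' % info.get('progress', 0))
            time.sleep(interval)  # a ~0.1s task still costs one full tick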
[ 3055.747347] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3055.747391] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97e62ba5-35b1-4d2b-a7f1-b8b657246984 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3055.753852] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3055.753852] env[61215]: value = "task-1690557" [ 3055.753852] env[61215]: _type = "Task" [ 3055.753852] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3055.761409] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690557, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3056.263805] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690557, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106613} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3056.264098] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3056.264244] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/cfd7dda5-dd02-4e1a-a12e-b39abc6db212" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3056.264461] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/d7f8e561-f83d-45e7-8346-adf0696f31ac" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3056.264579] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/d7f8e561-f83d-45e7-8346-adf0696f31ac" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3056.264931] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/d7f8e561-f83d-45e7-8346-adf0696f31ac" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3056.265219] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c282060b-ed81-4bf6-bbb3-ca93ab3ac1ce {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.269762] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3056.269762] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]525a5273-7c66-0d46-86a1-cd8c803c9eb0" [ 3056.269762] env[61215]: _type = "Task" [ 3056.269762] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3056.277159] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525a5273-7c66-0d46-86a1-cd8c803c9eb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3056.781142] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525a5273-7c66-0d46-86a1-cd8c803c9eb0, 'name': SearchDatastore_Task, 'duration_secs': 0.009364} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3056.781142] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/d7f8e561-f83d-45e7-8346-adf0696f31ac is no longer used. Deleting! [ 3056.781338] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/d7f8e561-f83d-45e7-8346-adf0696f31ac {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3056.781447] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12345d46-52e4-41c6-aadc-817ae42a40a8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3056.787819] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3056.787819] env[61215]: value = "task-1690558" [ 3056.787819] env[61215]: _type = "Task" [ 3056.787819] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3056.795737] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690558, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3057.298389] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690558, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111439} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3057.298730] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3057.298766] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/d7f8e561-f83d-45e7-8346-adf0696f31ac" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3057.298986] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/5e252684-9940-4dfc-a50d-13a9c36d8207" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3057.299123] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/5e252684-9940-4dfc-a50d-13a9c36d8207" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3057.299455] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5e252684-9940-4dfc-a50d-13a9c36d8207" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3057.299721] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4405a74-6f00-4e34-8407-bf8079cfb4f7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3057.303846] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3057.303846] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52bb2cbd-f306-a639-6e09-afa92cd46bcc" [ 3057.303846] env[61215]: _type = "Task" [ 3057.303846] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3057.311122] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52bb2cbd-f306-a639-6e09-afa92cd46bcc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3057.814965] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52bb2cbd-f306-a639-6e09-afa92cd46bcc, 'name': SearchDatastore_Task, 'duration_secs': 0.009741} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3057.815269] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/5e252684-9940-4dfc-a50d-13a9c36d8207 is no longer used. Deleting! 
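Every path in these entries has the form "[datastore1] devstack-image-cache_base/<image id>". oslo.vmware ships a parser for that notation, which is what str(ds_path) round-trips through in the delete calls above; a short usage example with an id taken from this log:

    from oslo_vmware.objects.datastore import DatastorePath

    p = DatastorePath.parse(
        '[datastore1] devstack-image-cache_base/'
        '5e252684-9940-4dfc-a50d-13a9c36d8207')
    assert p.datastore == 'datastore1'
    assert p.basename == '5e252684-9940-4dfc-a50d-13a9c36d8207'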
[ 3057.815422] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/5e252684-9940-4dfc-a50d-13a9c36d8207 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3057.815683] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50b145bf-f8f7-40ae-8b5c-5e2119cf5216 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3057.822419] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3057.822419] env[61215]: value = "task-1690559" [ 3057.822419] env[61215]: _type = "Task" [ 3057.822419] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3057.829494] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690559, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3058.334057] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690559, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.112496} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3058.334364] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3058.334506] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/5e252684-9940-4dfc-a50d-13a9c36d8207" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3058.334724] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/73df8433-6d6d-4be5-9126-080fed6c44aa" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3058.334887] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/73df8433-6d6d-4be5-9126-080fed6c44aa" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3058.335335] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/73df8433-6d6d-4be5-9126-080fed6c44aa" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3058.335675] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27c828c9-2d99-498f-b2bf-e4a904d95f04 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3058.340447] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3058.340447] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]525f9ee7-64de-072a-bca5-6a159a98525b" [ 3058.340447] env[61215]: _type = "Task" [ 3058.340447] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3058.349621] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525f9ee7-64de-072a-bca5-6a159a98525b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3058.852012] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525f9ee7-64de-072a-bca5-6a159a98525b, 'name': SearchDatastore_Task, 'duration_secs': 0.010638} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3058.852338] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/73df8433-6d6d-4be5-9126-080fed6c44aa is no longer used. Deleting! [ 3058.852483] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/73df8433-6d6d-4be5-9126-080fed6c44aa {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3058.852740] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce75daff-baea-4b08-b8ea-f6587b7ab387 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3058.858497] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3058.858497] env[61215]: value = "task-1690560" [ 3058.858497] env[61215]: _type = "Task" [ 3058.858497] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3058.866174] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690560, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3059.368653] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690560, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133714} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3059.368990] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3059.369123] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/73df8433-6d6d-4be5-9126-080fed6c44aa" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3059.369300] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/4f561b7f-8554-40fc-b0ce-106ef7df334e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3059.369419] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/4f561b7f-8554-40fc-b0ce-106ef7df334e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3059.369751] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/4f561b7f-8554-40fc-b0ce-106ef7df334e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3059.370021] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20fd139b-22f0-4295-b914-fa14b59fd1f4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3059.374338] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3059.374338] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52fda3cf-16fb-1436-06e8-8dba4b1a48b7" [ 3059.374338] env[61215]: _type = "Task" [ 3059.374338] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3059.381862] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52fda3cf-16fb-1436-06e8-8dba4b1a48b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3059.887766] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52fda3cf-16fb-1436-06e8-8dba4b1a48b7, 'name': SearchDatastore_Task, 'duration_secs': 0.009231} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3059.888085] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/4f561b7f-8554-40fc-b0ce-106ef7df334e is no longer used. Deleting! 
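(Editor's note: the records above and below all follow the same wait_for_task -> _poll_task cycle from oslo_vmware/api.py, polling a vCenter task until it reaches a terminal state. A minimal Python sketch of that polling pattern follows; it is an illustration under assumptions, not the actual oslo.vmware implementation, and the TaskInfo field names are those of the vSphere API.)

    import time

    def wait_for_task(task, poll_interval=0.5):
        """Illustrative poll loop for a vSphere task object.

        'task.info' is the vSphere TaskInfo (state, progress, error,
        result); the real _poll_task runs on a looping call and maps
        errors through exceptions.translate_fault.
        """
        while True:
            info = task.info
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # simplified: re-raise the localized fault message
                raise RuntimeError(info.error.localizedMessage)
            time.sleep(poll_interval)
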
[ 3059.888238] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/4f561b7f-8554-40fc-b0ce-106ef7df334e {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3059.888507] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a87de71e-c7a4-41e7-bda2-9dea9d9c3b83 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3059.915561] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3059.915561] env[61215]: value = "task-1690561" [ 3059.915561] env[61215]: _type = "Task" [ 3059.915561] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3059.923653] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690561, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3060.425393] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690561, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.117754} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3060.425804] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3060.425804] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/4f561b7f-8554-40fc-b0ce-106ef7df334e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3060.426040] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/62867302-ebc1-4c14-ab1f-3120d7e2b6ed" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3060.426170] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/62867302-ebc1-4c14-ab1f-3120d7e2b6ed" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3060.426494] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/62867302-ebc1-4c14-ab1f-3120d7e2b6ed" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3060.426755] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3a74ab8-54e4-4527-b530-49414b800cff {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3060.430914] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3060.430914] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]526c7e75-2e02-2014-1220-ed102ba151f0" [ 3060.430914] env[61215]: _type = "Task" [ 3060.430914] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3060.438404] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]526c7e75-2e02-2014-1220-ed102ba151f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3060.942273] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]526c7e75-2e02-2014-1220-ed102ba151f0, 'name': SearchDatastore_Task, 'duration_secs': 0.008994} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3060.942608] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/62867302-ebc1-4c14-ab1f-3120d7e2b6ed is no longer used. Deleting! [ 3060.942750] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/62867302-ebc1-4c14-ab1f-3120d7e2b6ed {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3060.943029] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc33dc22-4b0e-475b-8d38-96d5c3787b2c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3060.949472] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3060.949472] env[61215]: value = "task-1690562" [ 3060.949472] env[61215]: _type = "Task" [ 3060.949472] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3060.957241] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690562, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3061.459749] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690562, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138162} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3061.460179] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3061.460179] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/62867302-ebc1-4c14-ab1f-3120d7e2b6ed" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3061.460383] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/5a058243-977f-4193-925a-cbbcbe85aaf0" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3061.460505] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/5a058243-977f-4193-925a-cbbcbe85aaf0" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3061.460841] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/5a058243-977f-4193-925a-cbbcbe85aaf0" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3061.461126] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4b8cb5d-f11e-44d5-a955-10f985609bee {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3061.465444] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3061.465444] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5232057b-9375-e140-e23c-31fb3530ce6e" [ 3061.465444] env[61215]: _type = "Task" [ 3061.465444] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3061.472814] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5232057b-9375-e140-e23c-31fb3530ce6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3061.978824] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5232057b-9375-e140-e23c-31fb3530ce6e, 'name': SearchDatastore_Task, 'duration_secs': 0.009874} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3061.979193] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/5a058243-977f-4193-925a-cbbcbe85aaf0 is no longer used. Deleting! 
[ 3061.979352] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/5a058243-977f-4193-925a-cbbcbe85aaf0 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3061.979617] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a029087f-7b54-4659-90a3-d40fb10847cf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3061.985484] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3061.985484] env[61215]: value = "task-1690563" [ 3061.985484] env[61215]: _type = "Task" [ 3061.985484] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3061.993030] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690563, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3062.497162] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096513} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3062.497577] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3062.497577] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/5a058243-977f-4193-925a-cbbcbe85aaf0" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3062.497868] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/7faec888-10df-410e-8f47-e3f85f2ead2b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3062.497868] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/7faec888-10df-410e-8f47-e3f85f2ead2b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3062.498213] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/7faec888-10df-410e-8f47-e3f85f2ead2b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3062.498470] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbabfb29-3571-4557-9f2b-24ffb1380818 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3062.502660] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3062.502660] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b6b31b-58d0-50d9-f130-f3ef97841a97" [ 3062.502660] env[61215]: _type = "Task" [ 3062.502660] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3062.510223] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b6b31b-58d0-50d9-f130-f3ef97841a97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3063.014225] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b6b31b-58d0-50d9-f130-f3ef97841a97, 'name': SearchDatastore_Task, 'duration_secs': 0.00895} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3063.014532] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/7faec888-10df-410e-8f47-e3f85f2ead2b is no longer used. Deleting! [ 3063.014684] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/7faec888-10df-410e-8f47-e3f85f2ead2b {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3063.014955] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e3ac805-7fa4-43ff-8428-450b02c2d219 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3063.023020] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3063.023020] env[61215]: value = "task-1690564" [ 3063.023020] env[61215]: _type = "Task" [ 3063.023020] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3063.028576] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690564, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3063.531510] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690564, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120618} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3063.531922] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3063.531922] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/7faec888-10df-410e-8f47-e3f85f2ead2b" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3063.532166] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/d53e107a-6f6b-40ce-b334-20ddabfa6088" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3063.532306] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/d53e107a-6f6b-40ce-b334-20ddabfa6088" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3063.532625] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/d53e107a-6f6b-40ce-b334-20ddabfa6088" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3063.532888] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f0c91c7-adf1-41b0-b7f9-674709f3217a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3063.537129] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3063.537129] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]520845d0-6ec2-3c1c-7859-286a4217bad3" [ 3063.537129] env[61215]: _type = "Task" [ 3063.537129] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3063.544200] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]520845d0-6ec2-3c1c-7859-286a4217bad3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3064.048466] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]520845d0-6ec2-3c1c-7859-286a4217bad3, 'name': SearchDatastore_Task, 'duration_secs': 0.008731} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3064.048466] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/d53e107a-6f6b-40ce-b334-20ddabfa6088 is no longer used. Deleting! 
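(Editor's note: each cached image above is processed under a lock plus an "external semaphore" named after its datastore path, serializing the SearchDatastore_Task usage check and the DeleteDatastoreFile_Task. A sketch of that serialization pattern using oslo.concurrency's lock() context manager follows; the helper functions are hypothetical stand-ins, not Nova code.)

    from oslo_concurrency import lockutils

    def image_is_in_use(ds_path):
        # Hypothetical stand-in for the SearchDatastore_Task check.
        return False

    def delete_datastore_file(ds_path):
        # Hypothetical stand-in for FileManager.DeleteDatastoreFile_Task.
        pass

    def cleanup_cached_image(ds_path):
        # The lock name mirrors the datastore path; external=True
        # corresponds to the "Acquired external semaphore" records.
        with lockutils.lock(ds_path, external=True):
            if not image_is_in_use(ds_path):
                delete_datastore_file(ds_path)
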
[ 3064.048607] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/d53e107a-6f6b-40ce-b334-20ddabfa6088 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3064.048873] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-410bc487-1ed8-419f-9e0d-f144bedb6ac2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3064.056182] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3064.056182] env[61215]: value = "task-1690565" [ 3064.056182] env[61215]: _type = "Task" [ 3064.056182] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3064.062812] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690565, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3064.565592] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690565, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.110163} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3064.565966] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3064.565966] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/d53e107a-6f6b-40ce-b334-20ddabfa6088" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3064.566214] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/bc02ee04-eecf-4ea4-93a1-2ddc4345cf97" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3064.566359] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/bc02ee04-eecf-4ea4-93a1-2ddc4345cf97" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3064.566678] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/bc02ee04-eecf-4ea4-93a1-2ddc4345cf97" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3064.566941] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ecfad64-73b9-4016-937c-4c4f360b809b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3064.571137] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3064.571137] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ac282e-f549-7ed1-5d30-f44a70665d76" [ 3064.571137] env[61215]: _type = "Task" [ 3064.571137] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3064.578199] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ac282e-f549-7ed1-5d30-f44a70665d76, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3065.081937] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ac282e-f549-7ed1-5d30-f44a70665d76, 'name': SearchDatastore_Task, 'duration_secs': 0.009718} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3065.082307] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/bc02ee04-eecf-4ea4-93a1-2ddc4345cf97 is no longer used. Deleting! [ 3065.082478] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/bc02ee04-eecf-4ea4-93a1-2ddc4345cf97 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3065.082744] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-786f2713-42cd-40fd-957c-7af94512600c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3065.088610] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3065.088610] env[61215]: value = "task-1690566" [ 3065.088610] env[61215]: _type = "Task" [ 3065.088610] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3065.095803] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690566, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3065.598385] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690566, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106513} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3065.598760] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3065.598800] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/bc02ee04-eecf-4ea4-93a1-2ddc4345cf97" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3065.599012] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/054d54ae-f4ae-4c8b-899a-95996301b509" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3065.599137] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/054d54ae-f4ae-4c8b-899a-95996301b509" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3065.599519] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/054d54ae-f4ae-4c8b-899a-95996301b509" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3065.599803] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2ba02f0-cfeb-48d8-aa94-af46b74d3e10 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3065.604267] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3065.604267] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]523aee2d-3e8c-c686-68cc-29c64f904e8a" [ 3065.604267] env[61215]: _type = "Task" [ 3065.604267] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3065.612541] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]523aee2d-3e8c-c686-68cc-29c64f904e8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3066.115311] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]523aee2d-3e8c-c686-68cc-29c64f904e8a, 'name': SearchDatastore_Task, 'duration_secs': 0.009811} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3066.115632] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/054d54ae-f4ae-4c8b-899a-95996301b509 is no longer used. Deleting! 
[ 3066.115779] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/054d54ae-f4ae-4c8b-899a-95996301b509 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3066.116050] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e4abf2d-72e9-4457-9442-94a60a8cc9a9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3066.122614] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3066.122614] env[61215]: value = "task-1690567" [ 3066.122614] env[61215]: _type = "Task" [ 3066.122614] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3066.129732] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690567, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3066.632841] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105867} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3066.633544] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3066.636473] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/054d54ae-f4ae-4c8b-899a-95996301b509" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3066.636473] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/861cbb94-00a7-4cf9-9e2f-979a61cdabe5" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3066.636473] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/861cbb94-00a7-4cf9-9e2f-979a61cdabe5" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3066.636473] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/861cbb94-00a7-4cf9-9e2f-979a61cdabe5" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3066.636473] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3de7da0b-65e6-44d9-aa92-6e8ca9251315 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3066.639165] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3066.639165] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]521278b4-579a-973c-9c83-7f19d007a037" [ 3066.639165] env[61215]: _type = "Task" [ 3066.639165] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3066.646917] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]521278b4-579a-973c-9c83-7f19d007a037, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3067.152037] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]521278b4-579a-973c-9c83-7f19d007a037, 'name': SearchDatastore_Task, 'duration_secs': 0.009779} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3067.152037] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/861cbb94-00a7-4cf9-9e2f-979a61cdabe5 is no longer used. Deleting! [ 3067.152037] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/861cbb94-00a7-4cf9-9e2f-979a61cdabe5 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3067.152037] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d3eb07e-437b-4236-9d29-b47bdf382edf {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3067.157633] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3067.157633] env[61215]: value = "task-1690568" [ 3067.157633] env[61215]: _type = "Task" [ 3067.157633] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3067.164832] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690568, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3067.667552] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690568, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109303} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3067.667865] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3067.667946] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/861cbb94-00a7-4cf9-9e2f-979a61cdabe5" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3067.668185] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/91b7886f-79eb-4874-81dc-03841a653381" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3067.668308] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/91b7886f-79eb-4874-81dc-03841a653381" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3067.668622] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/91b7886f-79eb-4874-81dc-03841a653381" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3067.668887] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-524e4557-55d9-4724-85cc-2703dba48b8e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3067.673201] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3067.673201] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5274a1c9-91e3-2660-5e5a-9c9376cac325" [ 3067.673201] env[61215]: _type = "Task" [ 3067.673201] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3067.680786] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5274a1c9-91e3-2660-5e5a-9c9376cac325, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3067.819294] env[61215]: WARNING oslo_vmware.rw_handles [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 3067.819294] env[61215]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 3067.819294] env[61215]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 3067.819294] env[61215]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 3067.819294] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 3067.819294] env[61215]: ERROR oslo_vmware.rw_handles response.begin() [ 3067.819294] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 3067.819294] env[61215]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 3067.819294] env[61215]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 3067.819294] env[61215]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 3067.819294] env[61215]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 3067.819294] env[61215]: ERROR oslo_vmware.rw_handles [ 3067.820244] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Downloaded image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to vmware_temp/50a2df0b-9d50-488b-9f38-028b60c7b8ad/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 3067.821807] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Caching image {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 3067.822228] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Copying Virtual Disk [datastore1] vmware_temp/50a2df0b-9d50-488b-9f38-028b60c7b8ad/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk to [datastore1] vmware_temp/50a2df0b-9d50-488b-9f38-028b60c7b8ad/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk {{(pid=61215) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 3067.822527] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4c69e54-b681-48f4-8187-b13cc85c4d53 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3067.832268] env[61215]: DEBUG oslo_vmware.api [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for 
the task: (returnval){ [ 3067.832268] env[61215]: value = "task-1690569" [ 3067.832268] env[61215]: _type = "Task" [ 3067.832268] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3067.840149] env[61215]: DEBUG oslo_vmware.api [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': task-1690569, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3068.184779] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5274a1c9-91e3-2660-5e5a-9c9376cac325, 'name': SearchDatastore_Task, 'duration_secs': 0.010222} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3068.185478] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/91b7886f-79eb-4874-81dc-03841a653381 is no longer used. Deleting! [ 3068.185812] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/91b7886f-79eb-4874-81dc-03841a653381 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3068.186239] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3fd21545-a397-4071-b697-b8e883e26350 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.193971] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3068.193971] env[61215]: value = "task-1690570" [ 3068.193971] env[61215]: _type = "Task" [ 3068.193971] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3068.202408] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690570, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3068.341779] env[61215]: DEBUG oslo_vmware.exceptions [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Fault InvalidArgument not matched. 
{{(pid=61215) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 3068.342093] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3068.342657] env[61215]: ERROR nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3068.342657] env[61215]: Faults: ['InvalidArgument'] [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Traceback (most recent call last): [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/compute/manager.py", line 2886, in _build_resources [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] yield resources [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] self.driver.spawn(context, instance, image_meta, [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] self._fetch_image_if_missing(context, vi) [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] image_cache(vi, tmp_image_ds_loc) [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] vm_util.copy_virtual_disk( [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] session._wait_for_task(vmdk_copy_task) [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] return self.wait_for_task(task_ref) [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] return evt.wait() [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] result = hub.switch() [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] return self.greenlet.switch() [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] self.f(*self.args, **self.kw) [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] raise exceptions.translate_fault(task_info.error) [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Faults: ['InvalidArgument'] [ 3068.342657] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] [ 3068.344079] env[61215]: INFO nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Terminating instance [ 3068.344605] env[61215]: DEBUG oslo_concurrency.lockutils [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3068.344816] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3068.345071] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba585ca4-20f3-4863-9759-18ae42bf4aad 
{{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.347356] env[61215]: DEBUG nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Start destroying the instance on the hypervisor. {{(pid=61215) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3142}} [ 3068.347555] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Destroying instance {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 3068.348278] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f422bff-5fea-45c9-b8d4-380eaa6a5f1e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.354844] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Unregistering the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 3068.355730] env[61215]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61582f67-cb3a-4277-9e67-d5a3132f96d1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.357028] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3068.357211] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61215) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 3068.357849] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36b2095b-0965-4958-b729-23386b186c12 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.362615] env[61215]: DEBUG oslo_vmware.api [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Waiting for the task: (returnval){ [ 3068.362615] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d4539d-d4ea-86a6-55f9-ad5c6967ecb5" [ 3068.362615] env[61215]: _type = "Task" [ 3068.362615] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3068.374897] env[61215]: DEBUG oslo_vmware.api [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52d4539d-d4ea-86a6-55f9-ad5c6967ecb5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3068.612281] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Unregistered the VM {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 3068.612593] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Deleting contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 3068.612897] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Deleting the datastore file [datastore1] 039b406e-bc8c-41f9-a0d9-39d845b94a3f {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3068.613295] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4919f9e1-ac22-4898-999b-0e482037bc0a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.625577] env[61215]: DEBUG oslo_vmware.api [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 3068.625577] env[61215]: value = "task-1690572" [ 3068.625577] env[61215]: _type = "Task" [ 3068.625577] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3068.633802] env[61215]: DEBUG oslo_vmware.api [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': task-1690572, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3068.703655] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690570, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103147} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3068.704149] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3068.704149] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/91b7886f-79eb-4874-81dc-03841a653381" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3068.704383] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f2a5f832-768a-4c8e-a00e-0e34c18abc46" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3068.704572] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/f2a5f832-768a-4c8e-a00e-0e34c18abc46" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3068.704925] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f2a5f832-768a-4c8e-a00e-0e34c18abc46" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3068.705235] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56de4327-b288-4e83-9bc1-277a97f67907 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.709619] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3068.709619] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52afa5d4-7cd8-75ce-4572-41f88c600776" [ 3068.709619] env[61215]: _type = "Task" [ 3068.709619] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3068.717565] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52afa5d4-7cd8-75ce-4572-41f88c600776, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3068.873904] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Preparing fetch location {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 3068.874243] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Creating directory with path [datastore1] vmware_temp/b5b476dc-52d9-471b-8fb9-5fd7a9c29c91/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 3068.874488] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f108cc6-08f7-44fa-a609-5f6d0e3bc19a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.885720] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Created directory with path [datastore1] vmware_temp/b5b476dc-52d9-471b-8fb9-5fd7a9c29c91/e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 3068.885907] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Fetch image to [datastore1] vmware_temp/b5b476dc-52d9-471b-8fb9-5fd7a9c29c91/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 3068.886094] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to [datastore1] vmware_temp/b5b476dc-52d9-471b-8fb9-5fd7a9c29c91/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk on the data store datastore1 {{(pid=61215) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 3068.886963] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe44751-2f17-450e-9127-38aafb221e47 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.894548] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8d6837-ae38-4cfd-a607-1d8f9e101c65 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.903189] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dee1037-ea2c-4a5f-9642-430cb0eafd33 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.933268] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dfd43976-3346-4cee-aa3c-0d771e493374 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.938397] env[61215]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5f73a74e-a101-464a-aad9-227e3dbafcbe {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3068.959969] env[61215]: DEBUG nova.virt.vmwareapi.images [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] [instance: 2cfccfbb-62f4-4cf2-a8cf-4fc0939fbd94] Downloading image file data e91f0c25-9ff9-4937-8440-f47cfb2028bc to the data store datastore1 {{(pid=61215) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 3069.012320] env[61215]: DEBUG oslo_vmware.rw_handles [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5b476dc-52d9-471b-8fb9-5fd7a9c29c91/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 3069.073013] env[61215]: DEBUG oslo_vmware.rw_handles [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Completed reading data from the image iterator. {{(pid=61215) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 3069.073177] env[61215]: DEBUG oslo_vmware.rw_handles [None req-fd526e85-379f-4e48-9621-ac02860e20f8 tempest-ServerDiskConfigTestJSON-1345860763 tempest-ServerDiskConfigTestJSON-1345860763-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b5b476dc-52d9-471b-8fb9-5fd7a9c29c91/e91f0c25-9ff9-4937-8440-f47cfb2028bc/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61215) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 3069.135939] env[61215]: DEBUG oslo_vmware.api [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': task-1690572, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064249} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3069.136212] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3069.136412] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Deleted contents of the VM from datastore datastore1 {{(pid=61215) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 3069.136585] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Instance destroyed {{(pid=61215) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 3069.136761] env[61215]: INFO nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Took 0.79 seconds to destroy the instance on the hypervisor. [ 3069.138841] env[61215]: DEBUG nova.compute.claims [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Aborting claim: {{(pid=61215) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 3069.139034] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3069.139257] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3069.222059] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52afa5d4-7cd8-75ce-4572-41f88c600776, 'name': SearchDatastore_Task, 'duration_secs': 0.008519} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3069.222382] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/f2a5f832-768a-4c8e-a00e-0e34c18abc46 is no longer used. Deleting! 
[ 3069.222527] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/f2a5f832-768a-4c8e-a00e-0e34c18abc46 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3069.222786] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8e24f49-a1af-4c96-a12f-d017420e10e1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3069.231884] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3069.231884] env[61215]: value = "task-1690573" [ 3069.231884] env[61215]: _type = "Task" [ 3069.231884] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3069.235758] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5fd10b1-4658-4855-9010-67f0fffadbc0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3069.242392] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690573, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3069.244830] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92adf6f0-b875-40c7-8319-1d441e8fba99 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3069.277319] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82be4362-a376-4a8d-8852-61701c0c0923 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3069.284163] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ef5a7e-c6eb-411a-b783-8ee752d51386 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3069.296916] env[61215]: DEBUG nova.compute.provider_tree [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3069.334599] env[61215]: DEBUG nova.scheduler.client.report [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Updated inventory for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with generation 103 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 3069.334857] env[61215]: DEBUG nova.compute.provider_tree [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Updating resource provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 generation from 103 to 104 during operation: update_inventory {{(pid=61215) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 3069.335066] env[61215]: DEBUG nova.compute.provider_tree [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Updating inventory in ProviderTree for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 3069.349359] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.210s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3069.349901] env[61215]: ERROR nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3069.349901] env[61215]: Faults: ['InvalidArgument'] [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Traceback (most recent call last): [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/compute/manager.py", line 2633, in _build_and_run_instance [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] self.driver.spawn(context, instance, image_meta, [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] self._fetch_image_if_missing(context, vi) [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 
039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] image_cache(vi, tmp_image_ds_loc) [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] vm_util.copy_virtual_disk( [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] session._wait_for_task(vmdk_copy_task) [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] return self.wait_for_task(task_ref) [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] return evt.wait() [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] result = hub.switch() [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] return self.greenlet.switch() [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] self.f(*self.args, **self.kw) [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] raise exceptions.translate_fault(task_info.error) [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Faults: ['InvalidArgument'] [ 3069.349901] env[61215]: ERROR nova.compute.manager [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] [ 3069.350765] env[61215]: DEBUG nova.compute.utils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] 
[instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] VimFaultException {{(pid=61215) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 3069.352126] env[61215]: DEBUG nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Build of instance 039b406e-bc8c-41f9-a0d9-39d845b94a3f was re-scheduled: A specified parameter was not correct: fileType [ 3069.352126] env[61215]: Faults: ['InvalidArgument'] {{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2472}} [ 3069.352502] env[61215]: DEBUG nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Unplugging VIFs for instance {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2998}} [ 3069.352679] env[61215]: DEBUG nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61215) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3021}} [ 3069.352848] env[61215]: DEBUG nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Deallocating network for instance {{(pid=61215) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2281}} [ 3069.353033] env[61215]: DEBUG nova.network.neutron [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] deallocate_for_instance() {{(pid=61215) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 3069.684895] env[61215]: DEBUG nova.network.neutron [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Updating instance_info_cache with network_info: [] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3069.697225] env[61215]: INFO nova.compute.manager [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 039b406e-bc8c-41f9-a0d9-39d845b94a3f] Took 0.34 seconds to deallocate network for instance. [ 3069.743566] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690573, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108395} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3069.743875] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3069.745124] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/f2a5f832-768a-4c8e-a00e-0e34c18abc46" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3069.745124] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/3955961f-c4fc-4fe2-9399-49b36b0f8249" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3069.745124] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/3955961f-c4fc-4fe2-9399-49b36b0f8249" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3069.745124] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/3955961f-c4fc-4fe2-9399-49b36b0f8249" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3069.745124] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10ea9a52-570d-4c5a-a6de-a22ac970496c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3069.749419] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3069.749419] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ee2ae8-2af4-3348-1b3f-9d499cce61d1" [ 3069.749419] env[61215]: _type = "Task" [ 3069.749419] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3069.757809] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ee2ae8-2af4-3348-1b3f-9d499cce61d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3069.784212] env[61215]: INFO nova.scheduler.client.report [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Deleted allocations for instance 039b406e-bc8c-41f9-a0d9-39d845b94a3f [ 3069.808510] env[61215]: DEBUG oslo_concurrency.lockutils [None req-d8f93591-fa70-48a4-aff5-0fdcca3a6063 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "039b406e-bc8c-41f9-a0d9-39d845b94a3f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 192.023s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3070.260954] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52ee2ae8-2af4-3348-1b3f-9d499cce61d1, 'name': SearchDatastore_Task, 'duration_secs': 0.009381} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3070.261230] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/3955961f-c4fc-4fe2-9399-49b36b0f8249 is no longer used. Deleting! [ 3070.261377] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/3955961f-c4fc-4fe2-9399-49b36b0f8249 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3070.261644] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b16b350f-38dd-4f84-94cf-a4145828577f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3070.269554] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3070.269554] env[61215]: value = "task-1690574" [ 3070.269554] env[61215]: _type = "Task" [ 3070.269554] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3070.277447] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3070.780558] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100529} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3070.780788] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3070.780961] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/3955961f-c4fc-4fe2-9399-49b36b0f8249" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3070.781202] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/b6088630-8757-4bb1-b798-c7ed74c1d827" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3070.781327] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/b6088630-8757-4bb1-b798-c7ed74c1d827" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3070.781657] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/b6088630-8757-4bb1-b798-c7ed74c1d827" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3070.781934] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-837d1c5b-ea56-4d4d-9b4f-d3902fcd1f31 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3070.786404] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3070.786404] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52139579-5da8-b220-714c-040975e569cd" [ 3070.786404] env[61215]: _type = "Task" [ 3070.786404] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3070.793896] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52139579-5da8-b220-714c-040975e569cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3071.300330] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52139579-5da8-b220-714c-040975e569cd, 'name': SearchDatastore_Task, 'duration_secs': 0.009141} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3071.300948] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/b6088630-8757-4bb1-b798-c7ed74c1d827 is no longer used. Deleting! 
[ 3071.301274] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/b6088630-8757-4bb1-b798-c7ed74c1d827 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3071.301746] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ad89e3d-15d1-4a38-8a0b-580254106c30 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3071.308809] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3071.308809] env[61215]: value = "task-1690575" [ 3071.308809] env[61215]: _type = "Task" [ 3071.308809] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3071.317335] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690575, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3071.598008] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "6492bdcc-8601-4667-a9a9-2ae8b8183d9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3071.598265] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "6492bdcc-8601-4667-a9a9-2ae8b8183d9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3071.608854] env[61215]: DEBUG nova.compute.manager [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Starting instance... 
{{(pid=61215) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2424}} [ 3071.654751] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3071.654988] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3071.656373] env[61215]: INFO nova.compute.claims [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 3071.743303] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57778dc7-ae17-4d86-b0d0-aa266e976bab {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3071.750644] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160c2108-f3df-4e85-86ab-063e6d58fbb3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3071.781197] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a3346a-d8ac-4827-ae67-3a4939f14f63 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3071.787721] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92241166-d691-47a6-a315-679896ca99c8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3071.800216] env[61215]: DEBUG nova.compute.provider_tree [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed in ProviderTree for provider: 1329e087-aa78-44a2-9687-63a2b1b33fd5 {{(pid=61215) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 3071.809590] env[61215]: DEBUG nova.scheduler.client.report [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Inventory has not changed for provider 1329e087-aa78-44a2-9687-63a2b1b33fd5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 178, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61215) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 3071.820527] env[61215]: DEBUG oslo_vmware.api [None 
req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690575, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107938} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3071.820717] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3071.820886] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/b6088630-8757-4bb1-b798-c7ed74c1d827" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3071.821114] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/62cd638f-2e2a-4a15-b161-073a4109930a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3071.821235] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/62cd638f-2e2a-4a15-b161-073a4109930a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3071.821595] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/62cd638f-2e2a-4a15-b161-073a4109930a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3071.822233] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59ff4985-1453-49c3-8483-027249b619c9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3071.823992] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.169s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3071.824475] env[61215]: DEBUG nova.compute.manager [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Start building networks asynchronously for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2821}} [ 3071.829692] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3071.829692] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5201c67c-0e55-e87b-1bd6-d68eae8edef5" [ 3071.829692] env[61215]: _type = "Task" [ 3071.829692] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3071.836822] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5201c67c-0e55-e87b-1bd6-d68eae8edef5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3071.855970] env[61215]: DEBUG nova.compute.utils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Using /dev/sd instead of None {{(pid=61215) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 3071.857507] env[61215]: DEBUG nova.compute.manager [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Allocating IP information in the background. {{(pid=61215) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1973}} [ 3071.857688] env[61215]: DEBUG nova.network.neutron [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] allocate_for_instance() {{(pid=61215) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 3071.865607] env[61215]: DEBUG nova.compute.manager [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Start building block device mappings for instance. {{(pid=61215) _build_resources /opt/stack/nova/nova/compute/manager.py:2856}} [ 3071.927045] env[61215]: DEBUG nova.compute.manager [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Start spawning the instance on the hypervisor. 
{{(pid=61215) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2630}} [ 3071.930223] env[61215]: DEBUG nova.policy [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9db5492250b426c80f611d7a5686c1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3eac98da0cb41cbad12d92e9151b143', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61215) authorize /opt/stack/nova/nova/policy.py:203}} [ 3071.954956] env[61215]: DEBUG nova.virt.hardware [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-01T03:05:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-01T03:04:45Z,direct_url=,disk_format='vmdk',id=e91f0c25-9ff9-4937-8440-f47cfb2028bc,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='9bc3f57c82894c5d9e08b66e77a25dc5',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-01T03:04:46Z,virtual_size=,visibility=), allow threads: False {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 3071.955238] env[61215]: DEBUG nova.virt.hardware [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Flavor limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 3071.955409] env[61215]: DEBUG nova.virt.hardware [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Image limits 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 3071.955598] env[61215]: DEBUG nova.virt.hardware [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Flavor pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 3071.955782] env[61215]: DEBUG nova.virt.hardware [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Image pref 0:0:0 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 3071.955900] env[61215]: DEBUG nova.virt.hardware [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61215) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 3071.956125] env[61215]: DEBUG nova.virt.hardware [None 
req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 3071.956297] env[61215]: DEBUG nova.virt.hardware [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 3071.956474] env[61215]: DEBUG nova.virt.hardware [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Got 1 possible topologies {{(pid=61215) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 3071.956639] env[61215]: DEBUG nova.virt.hardware [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 3071.956817] env[61215]: DEBUG nova.virt.hardware [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61215) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 3071.957669] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19e561f-622a-4b2e-b751-389a16d21aa4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3071.965716] env[61215]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d6bb21-691d-4857-90e9-649046fca5a7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3072.278061] env[61215]: DEBUG nova.network.neutron [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Successfully created port: 9c5987ba-003e-4846-8200-12cda09d682f {{(pid=61215) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 3072.343306] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5201c67c-0e55-e87b-1bd6-d68eae8edef5, 'name': SearchDatastore_Task, 'duration_secs': 0.009392} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3072.343644] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/62cd638f-2e2a-4a15-b161-073a4109930a is no longer used. Deleting! 
[ 3072.343790] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/62cd638f-2e2a-4a15-b161-073a4109930a {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3072.344071] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf09e6fe-270e-4f29-9d3e-1ee9e72477e0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3072.351210] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3072.351210] env[61215]: value = "task-1690576" [ 3072.351210] env[61215]: _type = "Task" [ 3072.351210] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3072.358440] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690576, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3072.811880] env[61215]: DEBUG nova.compute.manager [req-1191649c-ba5b-4cfd-a1d6-533a7be9464d req-e5895f0d-a8a1-4bf4-92b6-461f5041aaec service nova] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Received event network-vif-plugged-9c5987ba-003e-4846-8200-12cda09d682f {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 3072.812168] env[61215]: DEBUG oslo_concurrency.lockutils [req-1191649c-ba5b-4cfd-a1d6-533a7be9464d req-e5895f0d-a8a1-4bf4-92b6-461f5041aaec service nova] Acquiring lock "6492bdcc-8601-4667-a9a9-2ae8b8183d9d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 3072.812369] env[61215]: DEBUG oslo_concurrency.lockutils [req-1191649c-ba5b-4cfd-a1d6-533a7be9464d req-e5895f0d-a8a1-4bf4-92b6-461f5041aaec service nova] Lock "6492bdcc-8601-4667-a9a9-2ae8b8183d9d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 3072.812540] env[61215]: DEBUG oslo_concurrency.lockutils [req-1191649c-ba5b-4cfd-a1d6-533a7be9464d req-e5895f0d-a8a1-4bf4-92b6-461f5041aaec service nova] Lock "6492bdcc-8601-4667-a9a9-2ae8b8183d9d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61215) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 3072.812708] env[61215]: DEBUG nova.compute.manager [req-1191649c-ba5b-4cfd-a1d6-533a7be9464d req-e5895f0d-a8a1-4bf4-92b6-461f5041aaec service nova] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] No waiting events found dispatching network-vif-plugged-9c5987ba-003e-4846-8200-12cda09d682f {{(pid=61215) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 3072.812878] env[61215]: WARNING nova.compute.manager [req-1191649c-ba5b-4cfd-a1d6-533a7be9464d req-e5895f0d-a8a1-4bf4-92b6-461f5041aaec service nova] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Received unexpected event network-vif-plugged-9c5987ba-003e-4846-8200-12cda09d682f for instance with vm_state building and task_state spawning. 
[ 3072.861776] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690576, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109105} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3072.861993] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3072.862186] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/62cd638f-2e2a-4a15-b161-073a4109930a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3072.862395] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/c960f8cd-7361-45db-b16e-d3086cde41ee" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3072.862515] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/c960f8cd-7361-45db-b16e-d3086cde41ee" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3072.862840] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/c960f8cd-7361-45db-b16e-d3086cde41ee" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3072.863121] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79c480ff-ea45-49c8-a5c3-9f03b54d73f2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3072.867651] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3072.867651] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]529ab9cc-bfdc-7878-201a-069640b3a44f" [ 3072.867651] env[61215]: _type = "Task" [ 3072.867651] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3072.881588] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]529ab9cc-bfdc-7878-201a-069640b3a44f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3072.894216] env[61215]: DEBUG nova.network.neutron [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Successfully updated port: 9c5987ba-003e-4846-8200-12cda09d682f {{(pid=61215) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 3072.906976] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "refresh_cache-6492bdcc-8601-4667-a9a9-2ae8b8183d9d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3072.907133] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "refresh_cache-6492bdcc-8601-4667-a9a9-2ae8b8183d9d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3072.907299] env[61215]: DEBUG nova.network.neutron [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Building network info cache for instance {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 3072.950551] env[61215]: DEBUG nova.network.neutron [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Instance cache missing network info. 
{{(pid=61215) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 3073.106172] env[61215]: DEBUG nova.network.neutron [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Updating instance_info_cache with network_info: [{"id": "9c5987ba-003e-4846-8200-12cda09d682f", "address": "fa:16:3e:3a:b7:5c", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c5987ba-00", "ovs_interfaceid": "9c5987ba-003e-4846-8200-12cda09d682f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3073.118721] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "refresh_cache-6492bdcc-8601-4667-a9a9-2ae8b8183d9d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3073.118997] env[61215]: DEBUG nova.compute.manager [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Instance network_info: |[{"id": "9c5987ba-003e-4846-8200-12cda09d682f", "address": "fa:16:3e:3a:b7:5c", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c5987ba-00", "ovs_interfaceid": "9c5987ba-003e-4846-8200-12cda09d682f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61215) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1988}} [ 3073.119430] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:b7:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b98c49ac-0eb7-4311-aa8f-60581b2ce706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9c5987ba-003e-4846-8200-12cda09d682f', 'vif_model': 'vmxnet3'}] {{(pid=61215) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 3073.126981] env[61215]: DEBUG oslo.service.loopingcall [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61215) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 3073.127405] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Creating VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 3073.127636] env[61215]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b175f649-a45e-4f14-b726-578e015db352 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3073.148858] env[61215]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 3073.148858] env[61215]: value = "task-1690577" [ 3073.148858] env[61215]: _type = "Task" [ 3073.148858] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3073.157474] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690577, 'name': CreateVM_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3073.379627] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]529ab9cc-bfdc-7878-201a-069640b3a44f, 'name': SearchDatastore_Task, 'duration_secs': 0.012629} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3073.379965] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/c960f8cd-7361-45db-b16e-d3086cde41ee is no longer used. Deleting! 
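
Each image-cache operation in this section is bracketed by "Acquiring lock", "Acquired lock" / "Acquired external semaphore", and "Releasing lock" records from oslo.concurrency. A minimal sketch of the same pattern, with an illustrative lock name and lock_path:

    from oslo_concurrency import lockutils

    # external=True layers a file-based interprocess lock on top of the
    # in-process semaphore, which is why the log shows both a lock and an
    # "external semaphore" being acquired for each cache entry.
    with lockutils.lock('image-cache-entry', external=True, lock_path='/tmp'):
        pass  # inspect or delete the cached image while holding the lock
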
[ 3073.380136] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/c960f8cd-7361-45db-b16e-d3086cde41ee {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3073.380411] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1496d79c-7c08-4601-93f3-47d309587bc1 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3073.387070] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3073.387070] env[61215]: value = "task-1690578" [ 3073.387070] env[61215]: _type = "Task" [ 3073.387070] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3073.395496] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690578, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3073.659247] env[61215]: DEBUG oslo_vmware.api [-] Task: {'id': task-1690577, 'name': CreateVM_Task, 'duration_secs': 0.296027} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3073.659489] env[61215]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Created VM on the ESX host {{(pid=61215) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 3073.660248] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3073.660440] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3073.660778] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3073.661053] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f5cca5e-84de-4602-be25-0d43db7828b7 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3073.665898] env[61215]: DEBUG oslo_vmware.api [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Waiting for the task: (returnval){ [ 3073.665898] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5229f532-6ce1-88d0-a11f-2350f75f77b0" 
[ 3073.665898] env[61215]: _type = "Task" [ 3073.665898] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3073.676047] env[61215]: DEBUG oslo_vmware.api [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5229f532-6ce1-88d0-a11f-2350f75f77b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3073.896949] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690578, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11038} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3073.897333] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3073.897333] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/c960f8cd-7361-45db-b16e-d3086cde41ee" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3073.897524] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/4c4220e0-43c3-4e73-8d51-f8db8a49271a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3073.897644] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/4c4220e0-43c3-4e73-8d51-f8db8a49271a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3073.897974] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/4c4220e0-43c3-4e73-8d51-f8db8a49271a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3073.898259] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f15cb133-5770-446b-90ef-ece316dc47d5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3073.902329] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3073.902329] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]525f9d8b-65e4-a095-05b4-fdba42f4bb7e" [ 3073.902329] env[61215]: _type = "Task" [ 3073.902329] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3073.909370] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525f9d8b-65e4-a095-05b4-fdba42f4bb7e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3074.176566] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3074.176784] env[61215]: DEBUG nova.virt.vmwareapi.vmops [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Processing image e91f0c25-9ff9-4937-8440-f47cfb2028bc {{(pid=61215) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 3074.177020] env[61215]: DEBUG oslo_concurrency.lockutils [None req-a5d0c23b-8f0c-46f8-ad72-046f857a79f0 tempest-DeleteServersTestJSON-1921371305 tempest-DeleteServersTestJSON-1921371305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/e91f0c25-9ff9-4937-8440-f47cfb2028bc/e91f0c25-9ff9-4937-8440-f47cfb2028bc.vmdk" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3074.413622] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]525f9d8b-65e4-a095-05b4-fdba42f4bb7e, 'name': SearchDatastore_Task, 'duration_secs': 0.009632} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3074.413941] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/4c4220e0-43c3-4e73-8d51-f8db8a49271a is no longer used. Deleting! [ 3074.414171] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/4c4220e0-43c3-4e73-8d51-f8db8a49271a {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3074.414434] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e640d50-9abd-4d0d-912c-2ea305228c39 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3074.421411] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3074.421411] env[61215]: value = "task-1690579" [ 3074.421411] env[61215]: _type = "Task" [ 3074.421411] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3074.428986] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690579, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3074.837863] env[61215]: DEBUG nova.compute.manager [req-4ef3882f-f6ea-489d-9d46-51eb9c2e6f18 req-20e4e3e5-ac9d-46a2-971a-61f3cec0759b service nova] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Received event network-changed-9c5987ba-003e-4846-8200-12cda09d682f {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11123}} [ 3074.838092] env[61215]: DEBUG nova.compute.manager [req-4ef3882f-f6ea-489d-9d46-51eb9c2e6f18 req-20e4e3e5-ac9d-46a2-971a-61f3cec0759b service nova] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Refreshing instance network info cache due to event network-changed-9c5987ba-003e-4846-8200-12cda09d682f. {{(pid=61215) external_instance_event /opt/stack/nova/nova/compute/manager.py:11128}} [ 3074.838316] env[61215]: DEBUG oslo_concurrency.lockutils [req-4ef3882f-f6ea-489d-9d46-51eb9c2e6f18 req-20e4e3e5-ac9d-46a2-971a-61f3cec0759b service nova] Acquiring lock "refresh_cache-6492bdcc-8601-4667-a9a9-2ae8b8183d9d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3074.838465] env[61215]: DEBUG oslo_concurrency.lockutils [req-4ef3882f-f6ea-489d-9d46-51eb9c2e6f18 req-20e4e3e5-ac9d-46a2-971a-61f3cec0759b service nova] Acquired lock "refresh_cache-6492bdcc-8601-4667-a9a9-2ae8b8183d9d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3074.838628] env[61215]: DEBUG nova.network.neutron [req-4ef3882f-f6ea-489d-9d46-51eb9c2e6f18 req-20e4e3e5-ac9d-46a2-971a-61f3cec0759b service nova] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Refreshing network info cache for port 9c5987ba-003e-4846-8200-12cda09d682f {{(pid=61215) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 3074.932806] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690579, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104393} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3074.933130] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3074.933196] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/4c4220e0-43c3-4e73-8d51-f8db8a49271a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3074.933418] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/645a4a3c-60aa-42f3-8bdb-913ff68db1bf" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3074.933548] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/645a4a3c-60aa-42f3-8bdb-913ff68db1bf" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3074.933853] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/645a4a3c-60aa-42f3-8bdb-913ff68db1bf" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3074.934141] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd79b2ae-2cdc-43d0-8afb-39a944f4cea0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3074.938146] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3074.938146] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]520c561c-e20d-744b-aaa4-844e2afe4761" [ 3074.938146] env[61215]: _type = "Task" [ 3074.938146] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3074.945188] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]520c561c-e20d-744b-aaa4-844e2afe4761, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3075.082296] env[61215]: DEBUG nova.network.neutron [req-4ef3882f-f6ea-489d-9d46-51eb9c2e6f18 req-20e4e3e5-ac9d-46a2-971a-61f3cec0759b service nova] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Updated VIF entry in instance network info cache for port 9c5987ba-003e-4846-8200-12cda09d682f. 
{{(pid=61215) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 3075.082676] env[61215]: DEBUG nova.network.neutron [req-4ef3882f-f6ea-489d-9d46-51eb9c2e6f18 req-20e4e3e5-ac9d-46a2-971a-61f3cec0759b service nova] [instance: 6492bdcc-8601-4667-a9a9-2ae8b8183d9d] Updating instance_info_cache with network_info: [{"id": "9c5987ba-003e-4846-8200-12cda09d682f", "address": "fa:16:3e:3a:b7:5c", "network": {"id": "a742dd7e-4910-4f56-91c4-28c6da7554f1", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2129807098-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d3eac98da0cb41cbad12d92e9151b143", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b98c49ac-0eb7-4311-aa8f-60581b2ce706", "external-id": "nsx-vlan-transportzone-184", "segmentation_id": 184, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9c5987ba-00", "ovs_interfaceid": "9c5987ba-003e-4846-8200-12cda09d682f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61215) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 3075.092077] env[61215]: DEBUG oslo_concurrency.lockutils [req-4ef3882f-f6ea-489d-9d46-51eb9c2e6f18 req-20e4e3e5-ac9d-46a2-971a-61f3cec0759b service nova] Releasing lock "refresh_cache-6492bdcc-8601-4667-a9a9-2ae8b8183d9d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3075.449402] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]520c561c-e20d-744b-aaa4-844e2afe4761, 'name': SearchDatastore_Task, 'duration_secs': 0.009681} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3075.449719] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/645a4a3c-60aa-42f3-8bdb-913ff68db1bf is no longer used. Deleting! [ 3075.449864] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/645a4a3c-60aa-42f3-8bdb-913ff68db1bf {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3075.450165] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-785f3483-03c9-4fe3-ab60-c6bcae47f4c9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3075.456734] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3075.456734] env[61215]: value = "task-1690580" [ 3075.456734] env[61215]: _type = "Task" [ 3075.456734] env[61215]: } to complete. 
{{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3075.464597] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690580, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3075.966551] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690580, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104031} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3075.966887] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3075.966887] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/645a4a3c-60aa-42f3-8bdb-913ff68db1bf" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3075.967110] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/1c8ee8fd-5f0c-4c2e-89fe-5597995aa309" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3075.967231] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/1c8ee8fd-5f0c-4c2e-89fe-5597995aa309" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3075.967566] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/1c8ee8fd-5f0c-4c2e-89fe-5597995aa309" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3075.967823] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5649a2c0-9be1-4cca-9439-37b91357307d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3075.971991] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3075.971991] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5277faa3-5bcb-a06b-16ce-cf5d56516665" [ 3075.971991] env[61215]: _type = "Task" [ 3075.971991] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3075.979102] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5277faa3-5bcb-a06b-16ce-cf5d56516665, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3076.483015] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5277faa3-5bcb-a06b-16ce-cf5d56516665, 'name': SearchDatastore_Task, 'duration_secs': 0.008791} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3076.483341] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/1c8ee8fd-5f0c-4c2e-89fe-5597995aa309 is no longer used. Deleting! [ 3076.483490] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/1c8ee8fd-5f0c-4c2e-89fe-5597995aa309 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3076.483750] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5cdda8ed-8568-4a82-8f8c-f7578415783e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3076.489709] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3076.489709] env[61215]: value = "task-1690581" [ 3076.489709] env[61215]: _type = "Task" [ 3076.489709] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3076.496721] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690581, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3076.999767] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104855} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3077.000066] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3077.000187] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/1c8ee8fd-5f0c-4c2e-89fe-5597995aa309" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3077.000416] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/fe6db0de-d101-4e56-96b8-79e2fec85b4d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3077.000537] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/fe6db0de-d101-4e56-96b8-79e2fec85b4d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3077.000868] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/fe6db0de-d101-4e56-96b8-79e2fec85b4d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3077.001150] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f659e0e-8c5c-4d20-883e-662031ccee29 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3077.005507] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3077.005507] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b0e637-f9d4-9567-48cc-9ad723c3db05" [ 3077.005507] env[61215]: _type = "Task" [ 3077.005507] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3077.012740] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b0e637-f9d4-9567-48cc-9ad723c3db05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3077.516781] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52b0e637-f9d4-9567-48cc-9ad723c3db05, 'name': SearchDatastore_Task, 'duration_secs': 0.008663} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3077.517110] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/fe6db0de-d101-4e56-96b8-79e2fec85b4d is no longer used. Deleting! 
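
The long run of SearchDatastore_Task / "is no longer used. Deleting!" / DeleteDatastoreFile_Task triples is the image-cache manager aging out unused base-image directories one at a time. Schematically, under stated assumptions: delete_folder is a hypothetical stand-in for the search-then-delete pair shown per directory, and the UUIDs are taken from the log:

    def age_cached_images(cached_images, used_images, delete_folder):
        """Delete every cached base image no instance still references.

        ``delete_folder`` stands in for the SearchDatastore_Task +
        DeleteDatastoreFile_Task pair the log shows per directory.
        """
        for image_id in sorted(cached_images - used_images):
            delete_folder('[datastore1] devstack-image-cache_base/%s' % image_id)

    # Toy run with two image IDs from the log, one still in use:
    deleted = []
    age_cached_images(
        cached_images={'62cd638f-2e2a-4a15-b161-073a4109930a',
                       'c960f8cd-7361-45db-b16e-d3086cde41ee'},
        used_images={'c960f8cd-7361-45db-b16e-d3086cde41ee'},
        delete_folder=deleted.append)
    print(deleted)  # only the unused image's folder is queued for deletion
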
[ 3077.517269] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/fe6db0de-d101-4e56-96b8-79e2fec85b4d {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3077.517528] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4310af16-ff80-4609-93da-b81ef36b7d83 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3077.524366] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3077.524366] env[61215]: value = "task-1690582" [ 3077.524366] env[61215]: _type = "Task" [ 3077.524366] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3077.532539] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690582, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3078.036055] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690582, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098061} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3078.036055] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3078.036055] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/fe6db0de-d101-4e56-96b8-79e2fec85b4d" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3078.036055] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/c51ef2b4-ce12-4fcb-bddd-37b05b3ee491" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3078.036055] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/c51ef2b4-ce12-4fcb-bddd-37b05b3ee491" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3078.036484] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/c51ef2b4-ce12-4fcb-bddd-37b05b3ee491" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3078.036575] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b8ec48f-404d-460c-9d00-b6df69057420 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3078.040967] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3078.040967] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f03e44-c2c1-007e-402e-40bfb0d8cfe9" [ 3078.040967] env[61215]: _type = "Task" [ 3078.040967] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3078.048343] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f03e44-c2c1-007e-402e-40bfb0d8cfe9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3078.550985] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52f03e44-c2c1-007e-402e-40bfb0d8cfe9, 'name': SearchDatastore_Task, 'duration_secs': 0.013602} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3078.551319] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/c51ef2b4-ce12-4fcb-bddd-37b05b3ee491 is no longer used. Deleting! [ 3078.551472] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/c51ef2b4-ce12-4fcb-bddd-37b05b3ee491 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3078.551735] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d53c4bc-5246-427f-878b-b9f199b9943d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3078.558019] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3078.558019] env[61215]: value = "task-1690583" [ 3078.558019] env[61215]: _type = "Task" [ 3078.558019] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3078.565067] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690583, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3079.067565] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690583, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106428} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3079.068022] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3079.068022] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/c51ef2b4-ce12-4fcb-bddd-37b05b3ee491" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3079.068263] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3079.068361] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3079.068709] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3079.068976] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e292ec39-d9fe-4eef-a30d-f0c2cad11e95 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3079.073220] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3079.073220] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]523fd2d2-457e-d2cc-a336-637d881d3367" [ 3079.073220] env[61215]: _type = "Task" [ 3079.073220] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3079.080826] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]523fd2d2-457e-d2cc-a336-637d881d3367, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3079.584185] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]523fd2d2-457e-d2cc-a336-637d881d3367, 'name': SearchDatastore_Task, 'duration_secs': 0.009571} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3079.584520] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19 is no longer used. Deleting! 
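
Each deletion above is preceded by a HostDatastoreBrowser.SearchDatastore_Task call that checks the cache directory before it is removed. A hedged sketch of that invocation: session is a live oslo.vmware connection as in the earlier sketch, and ds_browser (the datastore's HostDatastoreBrowser managed object) is a placeholder that a real caller would retrieve from the datastore's properties:

    # ``session`` is a live oslo.vmware connection (see the earlier sketch);
    # ``ds_browser`` is the datastore's HostDatastoreBrowser managed object.
    client_factory = session.vim.client.factory
    spec = client_factory.create('ns0:HostDatastoreBrowserSearchSpec')
    spec.matchPattern = ['*']

    task = session.invoke_api(
        session.vim, 'SearchDatastore_Task', ds_browser,
        datastorePath='[datastore1] devstack-image-cache_base/<image-id>',
        searchSpec=spec)
    session.wait_for_task(task)  # TaskInfo.result lists the matching files
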
[ 3079.584669] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3079.584982] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29cde3b5-524b-4f81-8474-4383623f64b2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3079.591046] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3079.591046] env[61215]: value = "task-1690584" [ 3079.591046] env[61215]: _type = "Task" [ 3079.591046] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3079.598616] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690584, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3080.100236] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690584, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09243} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3080.100579] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3080.100579] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/8c397d6a-2255-40a1-a544-a5131382ed19" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3080.100799] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/01a67424-9e71-41bb-be2f-fdaa78948304" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3080.100931] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/01a67424-9e71-41bb-be2f-fdaa78948304" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3080.101255] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/01a67424-9e71-41bb-be2f-fdaa78948304" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3080.101521] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0589c3b3-3b68-4898-ba6a-8960cb207ed9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3080.105739] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3080.105739] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52dc1350-eacb-a34a-0baa-72d0f2a2887c" [ 3080.105739] env[61215]: _type = "Task" [ 3080.105739] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3080.112767] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52dc1350-eacb-a34a-0baa-72d0f2a2887c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3080.616148] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52dc1350-eacb-a34a-0baa-72d0f2a2887c, 'name': SearchDatastore_Task, 'duration_secs': 0.010212} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3080.616459] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/01a67424-9e71-41bb-be2f-fdaa78948304 is no longer used. Deleting! [ 3080.616604] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/01a67424-9e71-41bb-be2f-fdaa78948304 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3080.616866] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-223f6af4-fbe7-42eb-993e-202fe3cda0a5 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3080.623040] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3080.623040] env[61215]: value = "task-1690585" [ 3080.623040] env[61215]: _type = "Task" [ 3080.623040] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3081.131846] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690585, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157021} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3081.132215] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3081.132263] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/01a67424-9e71-41bb-be2f-fdaa78948304" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3081.132488] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f0be311c-e8ca-4709-a9b3-6d0ee1a34a39" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3081.132608] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/f0be311c-e8ca-4709-a9b3-6d0ee1a34a39" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3081.132947] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f0be311c-e8ca-4709-a9b3-6d0ee1a34a39" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3081.133194] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f2b1475-ab25-4781-aabd-2d943681752d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3081.137378] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3081.137378] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5231e26f-bdd9-697e-6eda-43b93bf69a55" [ 3081.137378] env[61215]: _type = "Task" [ 3081.137378] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3081.144381] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5231e26f-bdd9-697e-6eda-43b93bf69a55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3081.648107] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5231e26f-bdd9-697e-6eda-43b93bf69a55, 'name': SearchDatastore_Task, 'duration_secs': 0.009103} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3081.648403] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/f0be311c-e8ca-4709-a9b3-6d0ee1a34a39 is no longer used. Deleting! 
[ 3081.648561] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/f0be311c-e8ca-4709-a9b3-6d0ee1a34a39 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3081.649019] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60b8709f-43ee-48a7-814d-479ba06fe74e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3081.654785] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3081.654785] env[61215]: value = "task-1690586" [ 3081.654785] env[61215]: _type = "Task" [ 3081.654785] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3081.662800] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690586, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3082.163993] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690586, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104864} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3082.166027] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3082.166027] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/f0be311c-e8ca-4709-a9b3-6d0ee1a34a39" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3082.166027] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/b0907d05-fb73-4936-8433-16c2c2835599" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3082.166027] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/b0907d05-fb73-4936-8433-16c2c2835599" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3082.166027] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/b0907d05-fb73-4936-8433-16c2c2835599" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3082.166027] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67fafbda-3efb-4c26-ae51-64d25ba64bd0 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3082.170100] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3082.170100] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52380f34-a15e-acad-d9c7-4f9a19288930" [ 3082.170100] env[61215]: _type = "Task" [ 3082.170100] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3082.177353] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52380f34-a15e-acad-d9c7-4f9a19288930, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3082.680875] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52380f34-a15e-acad-d9c7-4f9a19288930, 'name': SearchDatastore_Task, 'duration_secs': 0.008982} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3082.681199] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/b0907d05-fb73-4936-8433-16c2c2835599 is no longer used. Deleting! [ 3082.681349] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/b0907d05-fb73-4936-8433-16c2c2835599 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3082.681606] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3697be43-0d57-45b8-a62f-20be314f3e11 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3082.687666] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3082.687666] env[61215]: value = "task-1690587" [ 3082.687666] env[61215]: _type = "Task" [ 3082.687666] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3082.694869] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690587, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3083.197064] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690587, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105957} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3083.197377] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3083.197486] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/b0907d05-fb73-4936-8433-16c2c2835599" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3083.197684] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3083.197804] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3083.198134] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3083.198401] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b9d26c9-6f68-4598-a4e1-7791df0c3c9b {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3083.203020] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3083.203020] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52bdea8e-b095-2515-f687-f3777d4f72a4" [ 3083.203020] env[61215]: _type = "Task" [ 3083.203020] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3083.210855] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52bdea8e-b095-2515-f687-f3777d4f72a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3083.713584] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52bdea8e-b095-2515-f687-f3777d4f72a4, 'name': SearchDatastore_Task, 'duration_secs': 0.009588} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3083.713914] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327 is no longer used. Deleting! 
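Each "Invoking ..." record is followed by a "Waiting for the task" block and poll records from api.py:434/444; in oslo.vmware this is session.wait_for_task() re-reading task state on a fixed interval. A sketch under the assumption that the ~0.5 s gap between each "progress is 0%" and "completed successfully" pair reflects the session's task_poll_interval; the host and credentials below are placeholders:

```python
from oslo_vmware import api

# Placeholder endpoint/credentials; create_session=False skips the login,
# since this only sketches the polling configuration.
session = api.VMwareAPISession(
    'vc1.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5,
    create_session=False)

def run_task(task_ref):
    # Polls the task every task_poll_interval seconds, emitting one
    # "_poll_task ... progress is N%" record per poll, and returns the
    # task info once the task reports success (or raises on error).
    return session.wait_for_task(task_ref)
```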
[ 3083.714076] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3083.714356] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9288d6d-5da4-44f1-b48c-6982cd4a9b6a {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3083.720897] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3083.720897] env[61215]: value = "task-1690588" [ 3083.720897] env[61215]: _type = "Task" [ 3083.720897] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3083.728527] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690588, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3084.232104] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690588, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097308} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3084.232470] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3084.232508] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/f69db335-1cd0-4f6e-a5d1-8d90cfa58327" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3084.232737] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/ab2e6442-d7e9-4ecb-b882-d6f422152b5e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3084.232837] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/ab2e6442-d7e9-4ecb-b882-d6f422152b5e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3084.233191] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/ab2e6442-d7e9-4ecb-b882-d6f422152b5e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3084.233459] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89856e7e-c590-4d5d-bc44-3182dba93a8d {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3084.237602] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3084.237602] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52afaf0a-b1ae-3098-70a2-2fdcafc7f637" [ 3084.237602] env[61215]: _type = "Task" [ 3084.237602] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3084.244832] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52afaf0a-b1ae-3098-70a2-2fdcafc7f637, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3084.752137] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52afaf0a-b1ae-3098-70a2-2fdcafc7f637, 'name': SearchDatastore_Task, 'duration_secs': 0.009476} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3084.752506] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/ab2e6442-d7e9-4ecb-b882-d6f422152b5e is no longer used. Deleting! [ 3084.752660] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/ab2e6442-d7e9-4ecb-b882-d6f422152b5e {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3084.752929] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8daa5611-3220-46ff-930f-a706e56b8e2c {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3084.763536] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3084.763536] env[61215]: value = "task-1690589" [ 3084.763536] env[61215]: _type = "Task" [ 3084.763536] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3084.774585] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690589, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3085.273242] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098838} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3085.273619] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3085.273619] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/ab2e6442-d7e9-4ecb-b882-d6f422152b5e" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3085.273828] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/8b33ecd1-b27f-45ab-a6b6-0568e78559a3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3085.273948] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/8b33ecd1-b27f-45ab-a6b6-0568e78559a3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3085.274312] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/8b33ecd1-b27f-45ab-a6b6-0568e78559a3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3085.275026] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3213f65-f43b-4b71-b320-15830e598e24 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3085.278921] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3085.278921] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]521bb752-d334-0e13-7c2a-dc093f427e28" [ 3085.278921] env[61215]: _type = "Task" [ 3085.278921] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3085.286756] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]521bb752-d334-0e13-7c2a-dc093f427e28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3085.789715] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]521bb752-d334-0e13-7c2a-dc093f427e28, 'name': SearchDatastore_Task, 'duration_secs': 0.009128} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3085.790051] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/8b33ecd1-b27f-45ab-a6b6-0568e78559a3 is no longer used. Deleting! 
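The "Deleting the datastore file" / "Deleted the datastore file" pair at ds_util.py:211/220 brackets a single FileManager.DeleteDatastoreFile_Task round trip. A hedged reconstruction of that helper (the real one is nova/virt/vmwareapi/ds_util.file_delete; dc_ref is the Datacenter managed-object reference, which these records do not show):

```python
import logging

LOG = logging.getLogger(__name__)

def file_delete(session, ds_path, dc_ref):
    LOG.debug("Deleting the datastore file %s", ds_path)
    vim = session.vim
    file_manager = vim.service_content.fileManager
    # One DeleteDatastoreFile_Task invocation, then the same wait/poll
    # cycle seen in the task-1690586..1690596 records in this section.
    task = session.invoke_api(vim, 'DeleteDatastoreFile_Task', file_manager,
                              name=str(ds_path), datacenter=dc_ref)
    session.wait_for_task(task)
    LOG.debug("Deleted the datastore file")
```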
[ 3085.790195] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/8b33ecd1-b27f-45ab-a6b6-0568e78559a3 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3085.790460] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c102d3f0-3c8a-4139-9c21-66ac31d5d7e9 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3085.796952] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3085.796952] env[61215]: value = "task-1690590" [ 3085.796952] env[61215]: _type = "Task" [ 3085.796952] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3085.805125] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690590, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3086.306530] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101314} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3086.306877] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3086.306957] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/8b33ecd1-b27f-45ab-a6b6-0568e78559a3" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3086.307227] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/4febdd92-259a-457e-a4d8-7561e4e16673" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3086.307352] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/4febdd92-259a-457e-a4d8-7561e4e16673" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3086.307680] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/4febdd92-259a-457e-a4d8-7561e4e16673" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3086.307948] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8434ac0-b108-4246-b117-02bff11a39ce {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3086.311997] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3086.311997] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52218108-8768-4e99-91a5-4ebdc8ed282d" [ 3086.311997] env[61215]: _type = "Task" [ 3086.311997] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3086.319100] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52218108-8768-4e99-91a5-4ebdc8ed282d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3086.822486] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52218108-8768-4e99-91a5-4ebdc8ed282d, 'name': SearchDatastore_Task, 'duration_secs': 0.008743} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3086.822815] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/4febdd92-259a-457e-a4d8-7561e4e16673 is no longer used. Deleting! [ 3086.822962] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/4febdd92-259a-457e-a4d8-7561e4e16673 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3086.823250] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb5ec8a1-4066-4cd9-bcdf-2978ee120b07 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3086.829437] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3086.829437] env[61215]: value = "task-1690591" [ 3086.829437] env[61215]: _type = "Task" [ 3086.829437] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3086.837592] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690591, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3087.339167] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690591, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.095993} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3087.339516] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3087.339558] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/4febdd92-259a-457e-a4d8-7561e4e16673" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3087.339779] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/11454281-dcc2-4f64-bf57-e038e4be4905" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3087.339901] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/11454281-dcc2-4f64-bf57-e038e4be4905" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3087.340243] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/11454281-dcc2-4f64-bf57-e038e4be4905" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3087.340527] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bce2c5f1-c0e4-417a-8050-fc63a7133846 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3087.344763] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3087.344763] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]52cdb246-f147-32bf-e4bd-b3cb6a4a9c07" [ 3087.344763] env[61215]: _type = "Task" [ 3087.344763] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3087.351863] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52cdb246-f147-32bf-e4bd-b3cb6a4a9c07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3087.855366] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52cdb246-f147-32bf-e4bd-b3cb6a4a9c07, 'name': SearchDatastore_Task, 'duration_secs': 0.008835} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3087.855696] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/11454281-dcc2-4f64-bf57-e038e4be4905 is no longer used. Deleting! 
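Each cycle opens with HostDatastoreBrowser.SearchDatastore_Task against the cached image's directory; what that search reports back is what produces the "is no longer used. Deleting!" decision. A sketch of issuing that search, where the browser reference and the optional search spec are assumed inputs, since neither appears in these records:

```python
def search_datastore(session, browser, ds_path, spec=None):
    # Browse the cached image's directory on the datastore; the returnval
    # is the "session[...]" Task seen in the Waiting blocks above.
    task = session.invoke_api(session.vim, 'SearchDatastore_Task', browser,
                              datastorePath=str(ds_path), searchSpec=spec)
    # wait_for_task returns the completed task info; its result is a
    # HostDatastoreBrowserSearchResults describing any matching files.
    return session.wait_for_task(task)
```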
[ 3087.855846] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/11454281-dcc2-4f64-bf57-e038e4be4905 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3087.856111] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-964b4e70-15e0-45cf-82ea-877d4dbd5399 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3087.862853] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3087.862853] env[61215]: value = "task-1690592" [ 3087.862853] env[61215]: _type = "Task" [ 3087.862853] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3087.870703] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690592, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3088.372736] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098816} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3088.373179] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3088.373179] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/11454281-dcc2-4f64-bf57-e038e4be4905" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3088.373363] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/366265c2-ce0d-4c0a-8836-4cfc6f81c556" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3088.373494] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/366265c2-ce0d-4c0a-8836-4cfc6f81c556" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3088.373806] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/366265c2-ce0d-4c0a-8836-4cfc6f81c556" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3088.374076] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3e20612-59c3-4f30-8e5b-b37ca9e5dfce {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3088.378435] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3088.378435] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52548d4c-98fa-b65a-adc3-4c838b9cbba4" [ 3088.378435] env[61215]: _type = "Task" [ 3088.378435] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3088.385616] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52548d4c-98fa-b65a-adc3-4c838b9cbba4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3088.888997] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52548d4c-98fa-b65a-adc3-4c838b9cbba4, 'name': SearchDatastore_Task, 'duration_secs': 0.008908} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3088.889317] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/366265c2-ce0d-4c0a-8836-4cfc6f81c556 is no longer used. Deleting! [ 3088.889465] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/366265c2-ce0d-4c0a-8836-4cfc6f81c556 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3088.889718] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-112ed631-fbbd-4650-84d9-b297027fa8e8 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3088.895739] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3088.895739] env[61215]: value = "task-1690593" [ 3088.895739] env[61215]: _type = "Task" [ 3088.895739] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3088.903030] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690593, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3089.406190] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690593, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102183} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3089.406595] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3089.406595] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/366265c2-ce0d-4c0a-8836-4cfc6f81c556" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3089.406789] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/192b0bb1-c064-4d21-b4c3-e273fff13ed0" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3089.406912] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/192b0bb1-c064-4d21-b4c3-e273fff13ed0" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3089.407251] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/192b0bb1-c064-4d21-b4c3-e273fff13ed0" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3089.407512] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ae0099f-80f6-496d-9c72-26d9abb235b3 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3089.411794] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3089.411794] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]5298439f-06a2-5a9a-ade1-3fc82baaed68" [ 3089.411794] env[61215]: _type = "Task" [ 3089.411794] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3089.419061] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5298439f-06a2-5a9a-ade1-3fc82baaed68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3089.922523] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]5298439f-06a2-5a9a-ade1-3fc82baaed68, 'name': SearchDatastore_Task, 'duration_secs': 0.009721} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3089.922854] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/192b0bb1-c064-4d21-b4c3-e273fff13ed0 is no longer used. Deleting! 
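The "[datastore1] devstack-image-cache_base/<uuid>" strings used as lock names and delete targets are datastore paths; oslo.vmware models them with DatastorePath, which renders and parses this exact "[ds] dir/file" form. A small usage sketch, with the image id taken from the records above:

```python
from oslo_vmware.objects import datastore

path = datastore.DatastorePath(
    'datastore1', 'devstack-image-cache_base',
    '192b0bb1-c064-4d21-b4c3-e273fff13ed0')
assert str(path) == ('[datastore1] devstack-image-cache_base/'
                     '192b0bb1-c064-4d21-b4c3-e273fff13ed0')

# parse() inverts the rendering, recovering the datastore name.
same = datastore.DatastorePath.parse(str(path))
assert same.datastore == 'datastore1'
```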
[ 3089.923016] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/192b0bb1-c064-4d21-b4c3-e273fff13ed0 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3089.923285] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d084d893-0671-48ef-82b3-32fd810599cb {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3089.929968] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3089.929968] env[61215]: value = "task-1690594" [ 3089.929968] env[61215]: _type = "Task" [ 3089.929968] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3089.938203] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690594, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3090.488685] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690594, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102988} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3090.489082] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3090.489082] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/192b0bb1-c064-4d21-b4c3-e273fff13ed0" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3090.489300] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/6558bd44-9cf4-427c-90b9-86488d29c6c2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3090.489438] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/6558bd44-9cf4-427c-90b9-86488d29c6c2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3090.489779] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/6558bd44-9cf4-427c-90b9-86488d29c6c2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3090.490091] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b576717a-1ce3-486f-ae43-3357c3e48be4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3090.502315] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3090.502315] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52116631-959f-57c8-6b25-12bb05dcd675" [ 3090.502315] env[61215]: _type = "Task" [ 3090.502315] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3090.513590] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52116631-959f-57c8-6b25-12bb05dcd675, 'name': SearchDatastore_Task, 'duration_secs': 0.00864} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3090.513897] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/6558bd44-9cf4-427c-90b9-86488d29c6c2 is no longer used. Deleting! [ 3090.514054] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/6558bd44-9cf4-427c-90b9-86488d29c6c2 {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3090.514371] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0de41dcc-b819-4c72-8d63-b4166654f8a2 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3090.526440] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3090.526440] env[61215]: value = "task-1690595" [ 3090.526440] env[61215]: _type = "Task" [ 3090.526440] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3090.534572] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690595, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3091.036418] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690595, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106957} completed successfully. 
{{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3091.036634] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3091.036803] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/6558bd44-9cf4-427c-90b9-86488d29c6c2" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3091.037047] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/49b7d0ae-6bda-4b60-a220-0d2773864d8a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3091.037178] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/49b7d0ae-6bda-4b60-a220-0d2773864d8a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3091.037508] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/49b7d0ae-6bda-4b60-a220-0d2773864d8a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3091.037783] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee8a8171-3b87-4fad-a5da-5e2ea237da7f {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3091.042070] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3091.042070] env[61215]: value = "session[520f0517-4d04-0e7d-b609-e256f8cf56da]527f3aed-10a8-5638-63b6-4fc260d07066" [ 3091.042070] env[61215]: _type = "Task" [ 3091.042070] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3091.049825] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]527f3aed-10a8-5638-63b6-4fc260d07066, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3091.554299] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]527f3aed-10a8-5638-63b6-4fc260d07066, 'name': SearchDatastore_Task, 'duration_secs': 0.009216} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3091.557028] env[61215]: INFO nova.virt.vmwareapi.imagecache [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Image [datastore1] devstack-image-cache_base/49b7d0ae-6bda-4b60-a220-0d2773864d8a is no longer used. Deleting! 
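A run like this is timer-driven rather than request-driven (every record carries the same req-ecf891db request id): Nova's image cache manager fires as a periodic task. A sketch of just the timer shape with oslo.service, assuming the 2400 s default of Nova's image_cache.manager_interval option, which this log does not state:

```python
from oslo_service import loopingcall

def run_image_cache_manager_pass():
    # Would walk devstack-image-cache_base and age out unused entries,
    # producing a record sequence like the one above.
    pass

timer = loopingcall.FixedIntervalLoopingCall(run_image_cache_manager_pass)
timer.start(interval=2400)  # assumed default, in seconds
```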
[ 3091.557028] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleting the datastore file [datastore1] devstack-image-cache_base/49b7d0ae-6bda-4b60-a220-0d2773864d8a {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 3091.557028] env[61215]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdc1353d-c7c9-410c-8b64-a50ef5b7303e {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3091.561637] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3091.561637] env[61215]: value = "task-1690596" [ 3091.561637] env[61215]: _type = "Task" [ 3091.561637] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3091.569174] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690596, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 3092.075012] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': task-1690596, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105549} completed successfully. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 3092.077136] env[61215]: DEBUG nova.virt.vmwareapi.ds_util [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Deleted the datastore file {{(pid=61215) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 3092.077136] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Releasing lock "[datastore1] devstack-image-cache_base/49b7d0ae-6bda-4b60-a220-0d2773864d8a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 3092.077136] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquiring lock "[datastore1] devstack-image-cache_base/c8c96f89-5cc1-4e4c-b1d5-92dadf473d5a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 3092.077136] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired lock "[datastore1] devstack-image-cache_base/c8c96f89-5cc1-4e4c-b1d5-92dadf473d5a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 3092.077136] env[61215]: DEBUG oslo_concurrency.lockutils [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Acquired external semaphore "[datastore1] devstack-image-cache_base/c8c96f89-5cc1-4e4c-b1d5-92dadf473d5a" {{(pid=61215) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 3092.077136] env[61215]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2af6552-ad29-456b-8534-a14493b619e4 {{(pid=61215) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 3092.081152] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Waiting for the task: (returnval){ [ 3092.081152] env[61215]: value = 
"session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e8fd9b-e6f8-683b-3aa6-7a47c899339f" [ 3092.081152] env[61215]: _type = "Task" [ 3092.081152] env[61215]: } to complete. {{(pid=61215) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 3092.088531] env[61215]: DEBUG oslo_vmware.api [None req-ecf891db-b852-41bd-8230-adfeeff04fc9 None None] Task: {'id': session[520f0517-4d04-0e7d-b609-e256f8cf56da]52e8fd9b-e6f8-683b-3aa6-7a47c899339f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61215) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}